# Unpack the report parameters (supplied via the `params` list) into local
# variables/flags used throughout the analysis.
output.var <- params$output.var                  # name of the response column
transform.abs <- FALSE                           # absolute-value transform disabled
log.pred <- params$log.pred                      # log10-transform the response?
norm.pred <- FALSE                               # bestNormalize transform disabled
algo.forward.caret <- params$algo.forward.caret  # run forward selection?
algo.backward.caret <- params$algo.backward.caret # run backward selection?
algo.stepwise.caret <- params$algo.stepwise.caret # run stepwise selection?
algo.LASSO.caret <- params$algo.LASSO.caret      # run glmnet LASSO?
algo.LARS.caret <- params$algo.LARS.caret        # run LARS?
message("Parameters used for training/prediction: ")
## Parameters used for training/prediction:
str(params)
## List of 7
## $ output.var : chr "y3"
## $ log.pred : logi TRUE
## $ algo.forward.caret : logi TRUE
## $ algo.backward.caret: logi TRUE
## $ algo.stepwise.caret: logi TRUE
## $ algo.LASSO.caret : logi TRUE
## $ algo.LARS.caret : logi TRUE
# Setup Labels
# Name of the (possibly transformed) response column: "<output.var>.log" when
# log.pred is TRUE, otherwise the raw response name.
# (Fixed: the original `else output.var.tr = output.var` placed an assignment
# inside the else branch, which is malformed R.)
output.var.tr = if (log.pred == TRUE) paste0(output.var,'.log') else output.var
# Load predictors and labels, then join them on the JobName key.
feat = read.csv('../../Data/features_highprec.csv')
labels = read.csv('../../Data/labels.csv')
predictors = names(dplyr::select(feat,-JobName))
data.ori = inner_join(feat,labels,by='JobName')
#data.ori = inner_join(feat,select_at(labels,c('JobName',output.var)),by='JobName')
# Keep only complete cases for modelling and report how many rows were dropped.
cc = complete.cases(data.ori)
data.notComplete = data.ori[! cc,]
data = data.ori[cc,] %>% select_at(c(predictors,output.var,'JobName'))
message('Original cases: ',nrow(data.ori))
## Original cases: 10000
message('Non-Complete cases: ',nrow(data.notComplete))
## Non-Complete cases: 3020
message('Complete cases: ',nrow(data))
## Complete cases: 6980
summary(dplyr::select_at(data,c('JobName',output.var)))
## JobName y3
## Job_00001: 1 Min. : 95.91
## Job_00002: 1 1st Qu.:118.29
## Job_00003: 1 Median :124.03
## Job_00004: 1 Mean :125.40
## Job_00007: 1 3rd Qu.:131.06
## Job_00008: 1 Max. :193.73
## (Other) :6974
The output variable y3 shows right skewness, so we will proceed with a log transformation.
# Histogram + kernel density of the raw response (checking the skewness).
df=gather(select_at(data,output.var))
ggplot(df, aes(x=value)) +
geom_histogram(aes(y=..density..),bins = 50,fill='light blue') +
geom_density()
#stat_function(fun = dnorm, n = 100, args = list(mean = mean(df$value), sd = sd(df$value)))
# Normal Q-Q plot of the raw response.
ggplot(gather(select_at(data,output.var)), aes(sample=value)) +
stat_qq() +
facet_wrap(~key, scales = 'free',ncol=4)
# Derived response column: log10 of the raw response when log.pred is TRUE,
# otherwise a plain copy stored under the transformed name.
if(log.pred==TRUE) data[[output.var.tr]] = log(data[[output.var]],10) else
data[[output.var.tr]] = data[[output.var]]
# Raw vs transformed response: histograms/densities and Q-Q plots side by side.
df=gather(select_at(data,c(output.var,output.var.tr)))
ggplot(df, aes(value)) +
geom_histogram(aes(y=..density..),bins = 50,fill='light blue') +
geom_density() +
# stat_function(fun = dnorm, n = 100, args = list(mean = mean(df$value), sd = sd(df$value)))
facet_wrap(~key, scales = 'free',ncol=2)
ggplot(gather(select_at(data,c(output.var,output.var.tr))), aes(sample=value)) +
stat_qq() +
facet_wrap(~key, scales = 'free',ncol=4)
Normalization of y3 using the bestNormalize package (which suggests orderNorm). This is interesting, but I think it goes beyond the objective of the project.
# Let bestNormalize search for the best normalizing transformation of the raw
# response (exploratory only; the models below use the log10 transform).
t=bestNormalize::bestNormalize(data[[output.var]])
t
## Best Normalizing transformation with 6980 Observations
## Estimated Normality Statistics (Pearson P / df, lower => more normal):
## - No transform: 2.9288
## - Box-Cox: 1.4325
## - Log_b(x+a): 1.9649
## - sqrt(x+a): 2.4152
## - exp(x): 748.6115
## - arcsinh(x): 1.9655
## - Yeo-Johnson: 1.1802
## - orderNorm: 1.1333
## Estimation method: Out-of-sample via CV with 10 folds and 5 repeats
##
## Based off these, bestNormalize chose:
## orderNorm Transformation with 6980 nonmissing obs and no ties
## - Original quantiles:
## 0% 25% 50% 75% 100%
## 95.913 118.289 124.030 131.059 193.726
# Q-Q plots: raw response vs the bestNormalize (orderNorm) transformed response.
qqnorm(data[[output.var]])
qqnorm(predict(t))
orderNorm() is a rank-based procedure by which the values of a vector are mapped to their percentile, which is then mapped to the same percentile of the normal distribution. Without the presence of ties, this essentially guarantees that the transformation leads to a normal distribution.
All predictors show a fat-tailed distribution, with heavy density in both tails and low density around the mean. The orderNorm transformation can help (see [Best Normalizator] section).
Histograms
# Spot-check a representative subset of predictors.
cols = c('x11','x18','stat98','x7','stat110')
df=gather(select_at(data,cols))
# Histograms + densities of the sampled predictors.
ggplot(df, aes(value)) +
geom_histogram(aes(y=..density..),bins = 50,fill='light blue') +
geom_density() +
# stat_function(fun = dnorm, n = 100, args = list(mean = mean(df$value), sd = sd(df$value)))
facet_wrap(~key, scales = 'free',ncol=3)
# ggplot(gather(select_at(data,cols)), aes(sample=value)) +
# stat_qq()+
# facet_wrap(~key, scales = 'free',ncol=2)
# Per-column numeric summaries of the same predictors.
lapply(select_at(data,cols),summary)
## $x11
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 9.000e-08 9.494e-08 1.001e-07 1.001e-07 1.052e-07 1.100e-07
##
## $x18
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 1.500 3.147 4.769 4.772 6.418 7.999
##
## $stat98
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## -2.998619 -1.551882 -0.015993 -0.005946 1.528405 2.999499
##
## $x7
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0.700 1.266 1.854 1.852 2.446 3.000
##
## $stat110
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## -2.999543 -1.496865 -0.002193 -0.004129 1.504273 2.999563
Scatter plot vs. output variable **y3.log**
# Long-format data: each sampled predictor vs the transformed response.
d = gather(dplyr::select_at(data,c(cols,output.var.tr)),key=target,value=value,-!!output.var.tr)
# Scatter plots with a GAM smoother, one facet per predictor.
ggplot(data=d, aes_string(x='value',y=output.var.tr)) +
geom_point(color='light green',alpha=0.5) +
geom_smooth() +
facet_wrap(~target, scales = 'free',ncol=3)
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'
All predictors show strong indications of fat tails.
# Histograms + densities for ALL predictors.
df=gather(select_at(data,predictors))
ggplot(df, aes(value)) +
geom_histogram(aes(y=..density..),bins = 50,fill='light blue') +
geom_density() +
# stat_function(fun = dnorm, n = 100, args = list(mean = mean(df$value), sd = sd(df$value)))
facet_wrap(~key, scales = 'free',ncol=4)
#chart.Correlation(select(data,-JobName), pch=21)
# Correlation of every predictor with the transformed response, rounded and
# sorted descending (excludes JobName and the response itself).
t=as.data.frame(round(cor(dplyr::select(data,-one_of(output.var.tr,'JobName'))
,select_at(data,output.var.tr)),4)) %>%
rownames_to_column(var='variable') %>% filter(variable != !!output.var) %>% arrange(-y3.log)
#DT::datatable(t)
message("Top Positive")
## Top Positive
# The 20 predictors most positively correlated with the transformed response.
kable(head(arrange(t,desc(y3.log)),20))
| variable | y3.log |
|---|---|
| x18 | 0.3120 |
| x7 | 0.2091 |
| stat98 | 0.1784 |
| x9 | 0.1127 |
| x17 | 0.0611 |
| x16 | 0.0489 |
| x10 | 0.0472 |
| x21 | 0.0412 |
| x11 | 0.0322 |
| x8 | 0.0318 |
| stat156 | 0.0287 |
| stat23 | 0.0234 |
| stat100 | 0.0206 |
| stat144 | 0.0203 |
| stat59 | 0.0202 |
| stat60 | 0.0199 |
| stat195 | 0.0199 |
| stat141 | 0.0194 |
| stat73 | 0.0192 |
| stat197 | 0.0185 |
message("Top Negative")
## Top Negative
# The 20 predictors most negatively correlated with the transformed response.
kable(head(arrange(t,y3.log),20))
| variable | y3.log |
|---|---|
| stat110 | -0.1594 |
| x4 | -0.0603 |
| stat13 | -0.0345 |
| stat41 | -0.0345 |
| stat14 | -0.0317 |
| stat149 | -0.0309 |
| stat113 | -0.0279 |
| stat4 | -0.0248 |
| stat106 | -0.0236 |
| stat146 | -0.0236 |
| stat186 | -0.0217 |
| stat91 | -0.0210 |
| stat214 | -0.0209 |
| stat5 | -0.0207 |
| stat22 | -0.0202 |
| stat39 | -0.0202 |
| stat175 | -0.0194 |
| stat187 | -0.0193 |
| stat128 | -0.0192 |
| stat37 | -0.0191 |
#chart.Correlation(select(data,-JobName), pch=21)
# Full pairwise correlation matrix of all columns (JobName excluded).
t=as.data.frame(round(cor(dplyr::select(data,-one_of('JobName'))),4))
#DT::datatable(t,options=list(scrollX=T))
message("Showing only 10 variables")
## Showing only 10 variables
# Display only the top-left 10x10 corner of the matrix to keep it readable.
kable(t[1:10,1:10])
| x1 | x2 | x3 | x4 | x5 | x6 | x7 | x8 | x9 | x10 | |
|---|---|---|---|---|---|---|---|---|---|---|
| x1 | 1.0000 | 0.0034 | -0.0028 | 0.0085 | 0.0068 | 0.0159 | 0.0264 | -0.0012 | 0.0142 | 0.0013 |
| x2 | 0.0034 | 1.0000 | -0.0057 | 0.0004 | -0.0094 | -0.0101 | 0.0089 | 0.0078 | 0.0049 | -0.0214 |
| x3 | -0.0028 | -0.0057 | 1.0000 | 0.0029 | 0.0046 | 0.0006 | -0.0105 | -0.0002 | 0.0167 | -0.0137 |
| x4 | 0.0085 | 0.0004 | 0.0029 | 1.0000 | -0.0059 | 0.0104 | 0.0098 | 0.0053 | 0.0061 | -0.0023 |
| x5 | 0.0068 | -0.0094 | 0.0046 | -0.0059 | 1.0000 | 0.0016 | -0.0027 | 0.0081 | 0.0259 | -0.0081 |
| x6 | 0.0159 | -0.0101 | 0.0006 | 0.0104 | 0.0016 | 1.0000 | 0.0200 | -0.0157 | 0.0117 | -0.0072 |
| x7 | 0.0264 | 0.0089 | -0.0105 | 0.0098 | -0.0027 | 0.0200 | 1.0000 | -0.0018 | -0.0069 | -0.0221 |
| x8 | -0.0012 | 0.0078 | -0.0002 | 0.0053 | 0.0081 | -0.0157 | -0.0018 | 1.0000 | 0.0142 | -0.0004 |
| x9 | 0.0142 | 0.0049 | 0.0167 | 0.0061 | 0.0259 | 0.0117 | -0.0069 | 0.0142 | 1.0000 | 0.0149 |
| x10 | 0.0013 | -0.0214 | -0.0137 | -0.0023 | -0.0081 | -0.0072 | -0.0221 | -0.0004 | 0.0149 | 1.0000 |
Scatter plots with all predictors and the output variable (y3.log)
# Long-format data: every predictor vs the transformed response.
d = gather(dplyr::select_at(data,c(predictors,output.var.tr)),key=target,value=value,-!!output.var.tr)
# Scatter plots with a GAM smoother, one facet per predictor.
ggplot(data=d, aes_string(x='value',y=output.var.tr)) +
geom_point(color='light blue',alpha=0.5) +
geom_smooth() +
facet_wrap(~target, scales = 'free',ncol=4)
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'
No Multicollinearity among predictors
Showing Top predictor by VIF Value
# Variance inflation factors for all predictors; values close to 1 confirm
# there is no multicollinearity.
vifDF = usdm::vif(select_at(data,predictors)) %>% arrange(desc(VIF))
head(vifDF,15)
## Variables VIF
## 1 stat113 1.065780
## 2 stat20 1.059708
## 3 stat6 1.059477
## 4 stat59 1.059223
## 5 stat137 1.059173
## 6 stat202 1.058690
## 7 stat79 1.058641
## 8 stat145 1.058589
## 9 stat87 1.058549
## 10 stat112 1.058275
## 11 stat50 1.058100
## 12 stat105 1.057876
## 13 stat175 1.057776
## 14 stat46 1.057758
## 15 stat171 1.057441
# Feature engineering: x18 shows a curved relationship with the response, so
# add its square root as a candidate predictor.
data.tr=data %>%
mutate(x18.sqrt = sqrt(x18))
cols=c('x18','x18.sqrt')
# ggplot(gather(select_at(data.tr,cols)), aes(value)) +
# geom_histogram(aes(y=..density..),bins = 50,fill='light blue') +
# geom_density() +
# facet_wrap(~key, scales = 'free',ncol=4)
d = gather(dplyr::select_at(data.tr,c(cols,output.var.tr)),key=target,value=value,-!!output.var.tr)
ggplot(data=d, aes_string(x='value',y=output.var.tr)) +
geom_point(color='light blue',alpha=0.5) +
geom_smooth() +
facet_wrap(~target, scales = 'free',ncol=4)
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'
#removing unwanted variables
# Drop the untransformed x18, the raw response and the JobName id.
# (Fixed: the raw response was hard-coded as 'y3'; use output.var so the
# script honours the output.var parameter. The raw response is dropped only
# when a transformed copy exists, otherwise we would lose the response.)
drop.cols = c('x18', if (output.var != output.var.tr) output.var, 'JobName')
data.tr=data.tr %>%
dplyr::select_at(names(data.tr)[! names(data.tr) %in% drop.cols])
data=data.tr
label.names=output.var.tr
# Shuffle, then split 80/20 into train/test sets.
data = data[sample(nrow(data)),] # randomly shuffle data
split = sample.split(data[,label.names], SplitRatio = 0.8)
data.train = subset(data, split == TRUE)
data.test = subset(data, split == FALSE)
# Produce standard regression diagnostics for a fitted lm-style model:
# the base plot() diagnostics, studentized and standardized residual plots,
# a residual histogram against the standard normal, a leverage plot and a
# Cook's distance plot.
#
# model: a fitted model supporting resid/rstandard/rstudent/cooks.distance.
# train: the data the model was fitted on (for predictions and thresholds).
# Returns the vector of Cook's distances.
plot.diagnostics <- function(model, train) {
  plot(model)
  residuals = resid(model) # Plotted above in plot(lm.out)
  r.standard = rstandard(model)
  r.student = rstudent(model)
  # Studentized residuals vs fitted values.
  df = data.frame(x=predict(model,train),y=r.student)
  p=ggplot(data=df,aes(x=x,y=y)) +
    geom_point(color='blue',alpha=0.5,shape=20,size=2) +
    geom_hline(yintercept = 0,size=1)+
    ylab("Student Residuals") +
    xlab("Predicted Values")+
    ggtitle("Student Residual Plot")
  plot(p)
  # Standardized residuals vs fitted values, with +/-2 reference lines.
  # (Fixed: this plot uses r.standard, so label it "Standardized", not "Student".)
  df = data.frame(x=predict(model,train),y=r.standard)
  p=ggplot(data=df,aes(x=x,y=y)) +
    geom_point(color='blue',alpha=0.5,shape=20,size=2) +
    geom_hline(yintercept = c(-2,0,2),size=1)+
    ylab("Standardized Residuals") +
    xlab("Predicted Values")+
    ggtitle("Standardized Residual Plot")
  plot(p)
  # Histogram of studentized residuals, overlaid with the N(0, 1) density.
  df=data.frame(r.student)
  p=ggplot(data=df,aes(r.student)) +
    geom_histogram(aes(y=..density..),bins = 50,fill='blue',alpha=0.6) +
    stat_function(fun = dnorm, n = 100, args = list(mean = 0, sd = 1)) +
    ylab("Density")+
    xlab("Studentized Residuals")+
    ggtitle("Distribution of Studentized Residuals")
  plot(p)
  # http://www.stat.columbia.edu/~martin/W2024/R7.pdf
  # Influential plots
  inf.meas = influence.measures(model)
  # print (summary(inf.meas)) # too much data
  # Leverage plot
  lev = hat(model.matrix(model))
  df=tibble::rownames_to_column(as.data.frame(lev),'id')
  p=ggplot(data=df,aes(x=as.numeric(id),y=lev)) +
    geom_point(color='blue',alpha=0.5,shape=20,size=2) +
    ylab('Leverage') +
    xlab('Index')
  plot(p)
  # Cook's distance: points above 4/n are counted below; text labels are only
  # drawn above the stricter 15/n threshold to keep the plot readable.
  cd = cooks.distance(model)
  df=tibble::rownames_to_column(as.data.frame(cd),'id')
  p=ggplot(data=df,aes(x=as.numeric(id),y=cd)) +
    geom_point(color='blue',alpha=0.5,shape=20,size=2) +
    geom_text(data=filter(df,cd>15/nrow(train)),aes(label=id),check_overlap=T,size=3,vjust=-.5)+
    ylab('Cooks distances') +
    geom_hline(yintercept = c(4/nrow(train),0),size=1)+
    xlab('Index')
  plot(p)
  print (paste("Number of data points that have Cook's D > 4/n: ", length(cd[cd > 4/nrow(train)]), sep = ""))
  print (paste("Number of data points that have Cook's D > 1: ", length(cd[cd > 1]), sep = ""))
  return(cd)
}
# function to set up random seeds
# Based on http://jaehyeon-kim.github.io/2015/05/Setup-Random-Seeds-on-Caret-Package.html
#
# Builds the `seeds` list expected by caret::trainControl so that resampling
# is reproducible even when training runs in parallel.
# method:  resampling method ("cv" or "repeatedcv"; anything else -> NULL)
# numbers: number of folds
# repeats: number of repeats (used for "repeatedcv" only)
# tunes:   tuning-grid length (extra seeds needed per resample), or NULL
# seed:    master seed used to generate the per-resample seeds
setCaretSeeds <- function(method = "cv", numbers = 1, repeats = 1, tunes = NULL, seed = 1701) {
  # B is the number of resamples; each element is an integer vector of
  # length numbers + tune length (if any)
  B <- if (method == "cv") numbers
  else if(method == "repeatedcv") numbers * repeats
  else NULL
  # Fixed: the original tested is.null(length) -- `length` is a base function
  # and is never NULL, so unsupported methods crashed in vector(length = NULL)
  # instead of returning NULL seeds.
  if(is.null(B)) {
    seeds <- NULL
  } else {
    set.seed(seed = seed)
    seeds <- vector(mode = "list", length = B)
    seeds <- lapply(seeds, function(x) sample.int(n = 1000000
                    , size = numbers + ifelse(is.null(tunes), 0, tunes)))
    # caret requires one extra seed for the final model fit
    seeds[[length(seeds) + 1]] <- sample.int(n = 1000000, size = 1)
  }
  # return seeds
  seeds
}
# Train a regression model with caret using one of the subset-selection /
# shrinkage methods used in this report, with sensible default resampling and
# tuning grids, parallel execution, and method-specific diagnostics (metric
# curves, residual plots, selected coefficients).
#
# formula:       full model formula (the method selects its own subset)
# data:          training data
# method:        caret method: 'leapForward', 'leapBackward', 'leapSeq',
#                'glmnet' (with subopt = 'LASSO'), or 'lars'
# subopt:        sub-option qualifying `method` (only 'LASSO' is used)
# feature.names: predictor names (sizes the leap* nvmax grid)
# train.control, tune.grid, pre.proc: optional caret overrides
# Returns a list with the fitted model, best-model id (leap* only) and plots.
train.caret.glmselect = function(formula, data, method
                                 ,subopt = NULL, feature.names
                                 , train.control = NULL, tune.grid = NULL, pre.proc = NULL){
  # Default resampling: 10-fold CV with fixed seeds for reproducibility.
  if(is.null(train.control)){
    train.control <- trainControl(method = "cv"
                                  ,number = 10
                                  ,seeds = setCaretSeeds(method = "cv"
                                                         , numbers = 10
                                                         , seed = 1701)
                                  ,search = "grid"
                                  ,verboseIter = TRUE
                                  ,allowParallel = TRUE
    )
  }
  # Default tuning grids per method.
  if(is.null(tune.grid)){
    if (method %in% c('leapForward','leapBackward','leapSeq')){
      tune.grid = data.frame(nvmax = 1:length(feature.names))
    }
    # identical() keeps the comparison safe when subopt is NULL (the original
    # `subopt == 'LASSO'` produced a zero-length condition and failed).
    if (method == 'glmnet' && identical(subopt, 'LASSO')){
      # Will only show 1 Lambda value during training, but that is OK
      # https://stackoverflow.com/questions/47526544/why-need-to-tune-lambda-with-carettrain-method-glmnet-and-cv-glmnet
      # Another option for LASSO is this: https://github.com/topepo/caret/blob/master/RegressionTests/Code/lasso.R
      lambda = 10^seq(-2,0, length =100)
      alpha = c(1) # alpha = 1 selects the LASSO penalty in glmnet
      tune.grid = expand.grid(alpha = alpha,lambda = lambda)
    }
    if (method == 'lars'){
      # https://github.com/topepo/caret/blob/master/RegressionTests/Code/lars.R
      fraction = seq(0, 1, length = 100)
      tune.grid = expand.grid(fraction = fraction)
      pre.proc = c("center", "scale")
    }
  }
  # http://sshaikh.org/2015/05/06/parallelize-machine-learning-in-r-with-multi-core-cpus/
  cl <- makeCluster(ceiling(detectCores()*0.85)) # use 85% of cores only, leave rest for other tasks
  registerDoParallel(cl)
  set.seed(1)
  # note that the seed has to actually be set just before caret::train is
  # called; setting it earlier does not ensure reproducibility for some reason
  model.caret <- caret::train(formula
                              , data = data
                              , method = method
                              , tuneGrid = tune.grid
                              , trControl = train.control
                              , preProc = pre.proc
  )
  stopCluster(cl)
  registerDoSEQ() # register sequential engine in case you are not using this function anymore
  if (method %in% c('leapForward','leapBackward','leapSeq')){
    print("All models results")
    print(model.caret$results) # all model results
    print("Best Model")
    print(model.caret$bestTune) # best model
    model = model.caret$finalModel
    # Metrics Plot
    dataPlot = model.caret$results %>%
      gather(key='metric',value='value',-nvmax) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=nvmax,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=2,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    # Residuals Plot
    # leap function does not support studentized residuals
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth(method="lm")+
      theme_light()
    plot(residPlot)
    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      #geom_density(color='lightblue4') +
      stat_function(fun = dnorm, n = 100, args = list(mean = mean(dataPlot$res)
                    , sd = sd(dataPlot$res)),color='lightblue4') +
      theme_light() # fixed: '+' was missing here, so theme_light() was a no-op
    plot(residHistogram)
    id = rownames(model.caret$bestTune)
    # Provides the coefficients of the best model
    # regsubsets doesn't return a full model (see the regsubsets documentation),
    # so we need to refit the model to get coefficient intervals
    # https://stackoverflow.com/questions/13063762/how-to-obtain-a-lm-object-from-regsubsets
    print("Coefficients of final model:")
    coefs <- coef(model, id=id)
    # refit an lm on the selected variables to get the coefficient intervals
    nams <- names(coefs)
    nams <- nams[!nams %in% "(Intercept)"]
    response <- as.character(formula[[2]])
    form <- as.formula(paste(response, paste(nams, collapse = " + "), sep = " ~ "))
    mod <- lm(form, data = data)
    #coefs
    #coef(mod)
    print(car::Confint(mod))
    return(list(model = model,id = id, residPlot = residPlot, residHistogram=residHistogram
                ,modelLM=mod))
  }
  if (method == 'glmnet' && identical(subopt, 'LASSO')){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    print(model.caret$results)
    model=model.caret$finalModel
    # Metrics Plot
    dataPlot = model.caret$results %>%
      gather(key='metric',value='value',-lambda) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=lambda,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=2,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    # Residuals Plot
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth(method="lm")+
      theme_light()
    plot(residPlot)
    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      #geom_density(color='lightblue4') +
      stat_function(fun = dnorm, n = 100, args = list(mean = mean(dataPlot$res)
                    , sd = sd(dataPlot$res)),color='lightblue4') +
      theme_light() # fixed: '+' was missing here, so theme_light() was a no-op
    plot(residHistogram)
    print("Coefficients")
    #no interval for glmnet: https://stackoverflow.com/questions/39750965/confidence-intervals-for-ridge-regression
    t=coef(model,s=model.caret$bestTune$lambda)
    model.coef = t[which(t[,1]!=0),] # keep only the non-zero (selected) coefficients
    print(as.data.frame(model.coef))
    id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id, residPlot = residPlot, metricsPlot=metricsPlot ))
  }
  if (method == 'lars'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    # Metrics Plot
    dataPlot = model.caret$results %>%
      gather(key='metric',value='value',-fraction) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=fraction,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=2,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    # Residuals Plot
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth(method="lm")+
      theme_light()
    plot(residPlot)
    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      #geom_density(color='lightblue4') +
      stat_function(fun = dnorm, n = 100, args = list(mean = mean(dataPlot$res)
                    , sd = sd(dataPlot$res)),color='lightblue4') +
      theme_light() # fixed: '+' was missing here, so theme_light() was a no-op
    plot(residHistogram)
    print("Coefficients")
    t=coef(model.caret$finalModel,s=model.caret$bestTune$fraction,mode='fraction')
    model.coef = t[which(t!=0)] # keep only the non-zero (selected) coefficients
    print(model.coef)
    id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id, residPlot = residPlot, residHistogram=residHistogram))
  }
}
# https://stackoverflow.com/questions/48265743/linear-model-subset-selection-goodness-of-fit-with-k-fold-cross-validation
# changed slightly since call[[2]] was just returning "formula" without actually returning the value in formula
#
# Predict from a regsubsets fit: build the design matrix for `newdata` from
# `formula`, pull the coefficients of the size-`id` submodel, and multiply.
predict.regsubsets <- function(object, newdata, id, formula, ...) {
  # model.matrix adds the intercept column and expands any interaction terms
  design <- model.matrix(formula, newdata)
  beta <- coef(object, id = id)
  selected <- names(beta)
  return(design[, selected] %*% beta)
}
# Evaluate a fitted model on the test set: print a summary of the predictions
# and the test MSE, plot predicted vs actual on the transformed scale, then
# plot predicted vs actual on the ORIGINAL scale (back-transforming when the
# response was log10- or orderNorm-transformed).
#
# NOTE: relies on the globals log.pred / norm.pred set at the top of the file.
# method / subopt / id / formula / feature.names select the predict() variant;
# good / ok set the +/- relative-error reference-line slopes.
test.model = function(model, test, level=0.95
                      ,draw.limits = FALSE, good = 0.1, ok = 0.15
                      ,method = NULL, subopt = NULL
                      ,id = NULL, formula, feature.names, label.names
                      ,transformation = NULL){
  ## if using caret for glm select equivalent functionality,
  ## need to pass formula (full is ok as it will select subset of variables from there)
  # Chained else-if (fixed): the original fell through to `method == ...`
  # comparisons, which error on a NULL method (zero-length condition).
  if (is.null(method)){
    pred = predict(model, newdata=test, interval="confidence", level = level)
  } else if (method %in% c('leapForward','leapBackward','leapSeq')){
    pred = predict.regsubsets(model, newdata = test, id = id, formula = formula)
  } else if (method == 'glmnet' && identical(subopt, 'LASSO')){
    xtest = as.matrix(test[,feature.names])
    pred=as.data.frame(predict(model, xtest))
  } else if (method == 'lars'){
    pred=as.data.frame(predict(model, newdata = test))
  }
  # Summary of predicted values
  print ("Summary of predicted values: ")
  print(summary(pred[,1]))
  test.mse = mean((test[,label.names]-pred[,1])^2)
  print (paste(method, subopt, "Test MSE:", test.mse, sep=" "))
  if(log.pred == TRUE || norm.pred == TRUE){
    # plot transformed comparison first
    df=data.frame(x=test[,label.names],y=pred[,1])
    # print() is required (fixed): a bare ggplot value in the middle of a
    # function body is silently discarded, so this plot never rendered.
    print(ggplot(df,aes(x=x,y=y)) +
      geom_point(color='blue',alpha=0.5,shape=20,size=2) +
      geom_abline(slope=1,intercept=0,color='black',size=1) +
      #scale_y_continuous(limits=c(min(df),max(df)))+
      xlab("Actual (Transformed)")+
      ylab("Predicted (Transformed)"))
  }
  # Back-transform to the original scale for the final comparison plot.
  if (log.pred == FALSE && norm.pred == FALSE){
    x = test[,label.names]
    y = pred[,1]
  }
  if (log.pred == TRUE){
    x = 10^test[,label.names]
    y = 10^pred[,1]
  }
  if (norm.pred == TRUE){
    x = predict(transformation, test[,label.names], inverse = TRUE)
    y = predict(transformation, pred[,1], inverse = TRUE)
  }
  df=data.frame(x,y)
  # Reference lines: +/- `good` (green) and +/- `ok` (red) relative error bands.
  ggplot(df,aes(x,y)) +
    geom_point(color='blue',alpha=0.5,shape=20,size=2) +
    geom_abline(slope=c(1+good,1-good,1+ok,1-ok)
                ,intercept=rep(0,4),color=c('dark green','dark green','dark red','dark red'),size=1,alpha=0.8) +
    #scale_y_continuous(limits=c(min(df),max(df)))+
    xlab("Actual")+
    ylab("Predicted")
}
# Build the model formulas from the training-frame column names:
# full formula <label> ~ <all other columns>, plus an intercept-only baseline.
n <- names(data.train)
formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + ")
," ~", paste(n[!n %in% label.names], collapse = " + ")))
grand.mean.formula = as.formula(paste(paste(n[n %in% label.names], collapse = " + ")," ~ 1"))
print(formula)
## y3.log ~ x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + x11 +
## x12 + x13 + x14 + x15 + x16 + x17 + x19 + x20 + x21 + x22 +
## x23 + stat1 + stat2 + stat3 + stat4 + stat5 + stat6 + stat7 +
## stat8 + stat9 + stat10 + stat11 + stat12 + stat13 + stat14 +
## stat15 + stat16 + stat17 + stat18 + stat19 + stat20 + stat21 +
## stat22 + stat23 + stat24 + stat25 + stat26 + stat27 + stat28 +
## stat29 + stat30 + stat31 + stat32 + stat33 + stat34 + stat35 +
## stat36 + stat37 + stat38 + stat39 + stat40 + stat41 + stat42 +
## stat43 + stat44 + stat45 + stat46 + stat47 + stat48 + stat49 +
## stat50 + stat51 + stat52 + stat53 + stat54 + stat55 + stat56 +
## stat57 + stat58 + stat59 + stat60 + stat61 + stat62 + stat63 +
## stat64 + stat65 + stat66 + stat67 + stat68 + stat69 + stat70 +
## stat71 + stat72 + stat73 + stat74 + stat75 + stat76 + stat77 +
## stat78 + stat79 + stat80 + stat81 + stat82 + stat83 + stat84 +
## stat85 + stat86 + stat87 + stat88 + stat89 + stat90 + stat91 +
## stat92 + stat93 + stat94 + stat95 + stat96 + stat97 + stat98 +
## stat99 + stat100 + stat101 + stat102 + stat103 + stat104 +
## stat105 + stat106 + stat107 + stat108 + stat109 + stat110 +
## stat111 + stat112 + stat113 + stat114 + stat115 + stat116 +
## stat117 + stat118 + stat119 + stat120 + stat121 + stat122 +
## stat123 + stat124 + stat125 + stat126 + stat127 + stat128 +
## stat129 + stat130 + stat131 + stat132 + stat133 + stat134 +
## stat135 + stat136 + stat137 + stat138 + stat139 + stat140 +
## stat141 + stat142 + stat143 + stat144 + stat145 + stat146 +
## stat147 + stat148 + stat149 + stat150 + stat151 + stat152 +
## stat153 + stat154 + stat155 + stat156 + stat157 + stat158 +
## stat159 + stat160 + stat161 + stat162 + stat163 + stat164 +
## stat165 + stat166 + stat167 + stat168 + stat169 + stat170 +
## stat171 + stat172 + stat173 + stat174 + stat175 + stat176 +
## stat177 + stat178 + stat179 + stat180 + stat181 + stat182 +
## stat183 + stat184 + stat185 + stat186 + stat187 + stat188 +
## stat189 + stat190 + stat191 + stat192 + stat193 + stat194 +
## stat195 + stat196 + stat197 + stat198 + stat199 + stat200 +
## stat201 + stat202 + stat203 + stat204 + stat205 + stat206 +
## stat207 + stat208 + stat209 + stat210 + stat211 + stat212 +
## stat213 + stat214 + stat215 + stat216 + stat217 + x18.sqrt
print(grand.mean.formula)
## y3.log ~ 1
# Update feature.names because we may have transformed some features
feature.names = n[!n %in% label.names]
# Fit the full OLS model on every predictor as the baseline model.
model.full = lm(formula , data.train)
summary(model.full)
##
## Call:
## lm(formula = formula, data = data.train)
##
## Residuals:
## Min 1Q Median 3Q Max
## -0.075428 -0.020725 -0.004747 0.016374 0.159836
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 1.971e+00 9.457e-03 208.387 < 2e-16 ***
## x1 2.578e-05 6.504e-04 0.040 0.968385
## x2 9.335e-05 4.193e-04 0.223 0.823845
## x3 -1.298e-05 1.145e-04 -0.113 0.909747
## x4 -5.242e-05 9.012e-06 -5.817 6.33e-09 ***
## x5 2.460e-04 2.940e-04 0.837 0.402694
## x6 -6.019e-05 5.972e-04 -0.101 0.919727
## x7 1.147e-02 6.397e-04 17.938 < 2e-16 ***
## x8 2.771e-04 1.480e-04 1.872 0.061232 .
## x9 3.378e-03 3.294e-04 10.255 < 2e-16 ***
## x10 1.004e-03 3.070e-04 3.270 0.001081 **
## x11 2.054e+05 7.337e+04 2.800 0.005125 **
## x12 -8.596e-05 1.873e-04 -0.459 0.646304
## x13 5.086e-05 7.501e-05 0.678 0.497722
## x14 -3.492e-04 3.228e-04 -1.082 0.279344
## x15 -5.781e-05 3.075e-04 -0.188 0.850898
## x16 8.416e-04 2.138e-04 3.936 8.40e-05 ***
## x17 1.631e-03 3.240e-04 5.035 4.94e-07 ***
## x19 2.033e-04 1.656e-04 1.228 0.219604
## x20 -2.749e-04 1.149e-03 -0.239 0.810840
## x21 8.752e-05 4.223e-05 2.072 0.038272 *
## x22 -5.462e-04 3.428e-04 -1.593 0.111191
## x23 -1.801e-04 3.298e-04 -0.546 0.584961
## stat1 -1.882e-04 2.491e-04 -0.755 0.449990
## stat2 3.686e-04 2.459e-04 1.499 0.133913
## stat3 2.314e-04 2.468e-04 0.938 0.348526
## stat4 -1.722e-04 2.476e-04 -0.696 0.486594
## stat5 -1.194e-04 2.468e-04 -0.484 0.628457
## stat6 -2.678e-04 2.481e-04 -1.079 0.280535
## stat7 -2.793e-04 2.464e-04 -1.134 0.257039
## stat8 4.718e-04 2.482e-04 1.901 0.057365 .
## stat9 1.773e-04 2.478e-04 0.716 0.474301
## stat10 -1.274e-04 2.463e-04 -0.517 0.605010
## stat11 -4.219e-04 2.502e-04 -1.686 0.091809 .
## stat12 2.466e-04 2.466e-04 1.000 0.317349
## stat13 -3.260e-04 2.471e-04 -1.319 0.187119
## stat14 -9.531e-04 2.456e-04 -3.880 0.000105 ***
## stat15 -1.738e-04 2.458e-04 -0.707 0.479489
## stat16 -6.383e-05 2.469e-04 -0.259 0.796010
## stat17 -7.662e-05 2.449e-04 -0.313 0.754408
## stat18 -2.322e-04 2.469e-04 -0.941 0.346927
## stat19 7.784e-05 2.453e-04 0.317 0.751013
## stat20 -5.052e-04 2.481e-04 -2.037 0.041740 *
## stat21 -2.950e-04 2.477e-04 -1.191 0.233808
## stat22 -4.508e-04 2.476e-04 -1.821 0.068678 .
## stat23 3.885e-04 2.457e-04 1.581 0.113890
## stat24 -3.214e-04 2.483e-04 -1.294 0.195564
## stat25 -6.420e-04 2.455e-04 -2.615 0.008940 **
## stat26 -3.494e-04 2.468e-04 -1.416 0.156920
## stat27 -9.263e-05 2.473e-04 -0.375 0.708041
## stat28 1.161e-04 2.483e-04 0.467 0.640176
## stat29 1.755e-04 2.485e-04 0.706 0.479975
## stat30 2.642e-04 2.489e-04 1.062 0.288431
## stat31 7.362e-05 2.518e-04 0.292 0.770025
## stat32 5.389e-05 2.493e-04 0.216 0.828856
## stat33 -3.389e-04 2.473e-04 -1.371 0.170538
## stat34 -9.860e-05 2.475e-04 -0.398 0.690392
## stat35 -3.667e-04 2.480e-04 -1.479 0.139286
## stat36 -1.177e-04 2.454e-04 -0.480 0.631392
## stat37 -7.131e-05 2.505e-04 -0.285 0.775869
## stat38 3.291e-04 2.493e-04 1.320 0.186835
## stat39 -2.371e-04 2.460e-04 -0.964 0.335206
## stat40 -2.230e-05 2.475e-04 -0.090 0.928193
## stat41 -5.260e-04 2.458e-04 -2.140 0.032401 *
## stat42 6.148e-06 2.471e-04 0.025 0.980151
## stat43 -3.449e-04 2.481e-04 -1.390 0.164490
## stat44 1.695e-04 2.474e-04 0.685 0.493179
## stat45 -3.028e-04 2.478e-04 -1.222 0.221796
## stat46 3.764e-04 2.484e-04 1.515 0.129827
## stat47 2.058e-04 2.490e-04 0.826 0.408564
## stat48 2.461e-04 2.483e-04 0.991 0.321701
## stat49 7.015e-05 2.453e-04 0.286 0.774897
## stat50 3.233e-04 2.455e-04 1.317 0.187833
## stat51 1.219e-04 2.472e-04 0.493 0.621923
## stat52 -9.495e-05 2.485e-04 -0.382 0.702435
## stat53 -1.936e-04 2.511e-04 -0.771 0.440687
## stat54 -3.546e-04 2.490e-04 -1.424 0.154493
## stat55 -2.437e-05 2.457e-04 -0.099 0.920982
## stat56 -1.895e-04 2.464e-04 -0.769 0.442023
## stat57 1.382e-04 2.441e-04 0.566 0.571379
## stat58 -9.369e-05 2.451e-04 -0.382 0.702237
## stat59 5.991e-05 2.477e-04 0.242 0.808899
## stat60 6.000e-04 2.479e-04 2.420 0.015542 *
## stat61 -2.097e-04 2.481e-04 -0.845 0.397961
## stat62 -1.208e-04 2.458e-04 -0.492 0.623045
## stat63 7.981e-05 2.478e-04 0.322 0.747405
## stat64 -3.241e-04 2.469e-04 -1.313 0.189269
## stat65 -3.017e-04 2.490e-04 -1.212 0.225585
## stat66 2.225e-04 2.508e-04 0.887 0.375124
## stat67 -4.353e-05 2.501e-04 -0.174 0.861836
## stat68 1.152e-05 2.471e-04 0.047 0.962818
## stat69 1.106e-04 2.475e-04 0.447 0.655157
## stat70 3.962e-04 2.469e-04 1.605 0.108601
## stat71 1.580e-04 2.453e-04 0.644 0.519439
## stat72 3.925e-04 2.488e-04 1.578 0.114667
## stat73 1.031e-04 2.490e-04 0.414 0.678908
## stat74 -3.151e-04 2.480e-04 -1.270 0.204018
## stat75 -1.163e-04 2.494e-04 -0.466 0.641131
## stat76 1.271e-04 2.480e-04 0.513 0.608197
## stat77 1.543e-04 2.465e-04 0.626 0.531349
## stat78 -3.383e-04 2.481e-04 -1.363 0.172811
## stat79 -1.111e-04 2.481e-04 -0.448 0.654306
## stat80 3.097e-04 2.503e-04 1.237 0.216071
## stat81 2.165e-04 2.481e-04 0.873 0.382872
## stat82 2.648e-04 2.471e-04 1.072 0.283900
## stat83 -1.061e-04 2.477e-04 -0.428 0.668311
## stat84 -3.605e-04 2.457e-04 -1.467 0.142354
## stat85 1.274e-04 2.473e-04 0.515 0.606616
## stat86 2.661e-04 2.481e-04 1.073 0.283515
## stat87 -1.968e-04 2.504e-04 -0.786 0.431933
## stat88 -3.560e-04 2.455e-04 -1.450 0.147082
## stat89 -8.609e-05 2.449e-04 -0.352 0.725167
## stat90 -8.996e-05 2.501e-04 -0.360 0.719117
## stat91 -4.003e-04 2.464e-04 -1.624 0.104366
## stat92 -4.168e-04 2.481e-04 -1.680 0.092988 .
## stat93 -3.370e-04 2.488e-04 -1.354 0.175647
## stat94 -2.062e-04 2.479e-04 -0.832 0.405483
## stat95 -3.077e-06 2.486e-04 -0.012 0.990127
## stat96 -2.081e-04 2.458e-04 -0.847 0.397178
## stat97 2.890e-06 2.465e-04 0.012 0.990646
## stat98 3.511e-03 2.445e-04 14.362 < 2e-16 ***
## stat99 2.181e-04 2.496e-04 0.874 0.382246
## stat100 8.130e-04 2.487e-04 3.269 0.001086 **
## stat101 -4.317e-04 2.492e-04 -1.733 0.083198 .
## stat102 1.912e-04 2.504e-04 0.764 0.445012
## stat103 -3.531e-04 2.501e-04 -1.412 0.158083
## stat104 -3.915e-04 2.469e-04 -1.586 0.112875
## stat105 -6.953e-06 2.457e-04 -0.028 0.977427
## stat106 -7.349e-04 2.460e-04 -2.987 0.002829 **
## stat107 -3.974e-04 2.466e-04 -1.612 0.107038
## stat108 -2.632e-04 2.477e-04 -1.063 0.287987
## stat109 6.273e-05 2.472e-04 0.254 0.799656
## stat110 -3.446e-03 2.463e-04 -13.991 < 2e-16 ***
## stat111 -9.037e-05 2.474e-04 -0.365 0.714866
## stat112 -3.341e-05 2.500e-04 -0.134 0.893686
## stat113 -1.143e-04 2.501e-04 -0.457 0.647613
## stat114 4.422e-04 2.480e-04 1.783 0.074681 .
## stat115 2.464e-04 2.474e-04 0.996 0.319174
## stat116 2.389e-04 2.497e-04 0.957 0.338578
## stat117 -5.749e-05 2.489e-04 -0.231 0.817339
## stat118 -2.189e-04 2.455e-04 -0.892 0.372578
## stat119 -8.624e-05 2.473e-04 -0.349 0.727265
## stat120 1.029e-04 2.468e-04 0.417 0.676762
## stat121 -3.002e-04 2.486e-04 -1.207 0.227391
## stat122 -1.845e-04 2.459e-04 -0.750 0.453130
## stat123 1.127e-04 2.505e-04 0.450 0.652929
## stat124 -4.788e-05 2.478e-04 -0.193 0.846807
## stat125 2.080e-05 2.484e-04 0.084 0.933272
## stat126 1.686e-04 2.468e-04 0.683 0.494637
## stat127 1.442e-04 2.460e-04 0.586 0.557857
## stat128 -1.121e-04 2.476e-04 -0.453 0.650729
## stat129 1.045e-04 2.452e-04 0.426 0.669945
## stat130 3.601e-04 2.478e-04 1.453 0.146301
## stat131 4.242e-04 2.477e-04 1.712 0.086941 .
## stat132 8.880e-05 2.451e-04 0.362 0.717142
## stat133 2.662e-04 2.479e-04 1.074 0.283032
## stat134 -1.739e-04 2.456e-04 -0.708 0.479122
## stat135 -2.637e-05 2.470e-04 -0.107 0.914992
## stat136 7.066e-05 2.477e-04 0.285 0.775435
## stat137 3.614e-05 2.477e-04 0.146 0.884026
## stat138 1.278e-04 2.476e-04 0.516 0.605811
## stat139 2.027e-04 2.481e-04 0.817 0.414098
## stat140 2.519e-04 2.448e-04 1.029 0.303424
## stat141 2.848e-04 2.472e-04 1.152 0.249374
## stat142 -1.340e-04 2.505e-04 -0.535 0.592697
## stat143 3.029e-04 2.467e-04 1.228 0.219607
## stat144 4.175e-04 2.464e-04 1.694 0.090293 .
## stat145 1.700e-04 2.514e-04 0.676 0.499004
## stat146 -3.567e-04 2.492e-04 -1.432 0.152337
## stat147 -2.298e-04 2.480e-04 -0.927 0.354080
## stat148 -4.573e-04 2.461e-04 -1.858 0.063222 .
## stat149 -6.733e-04 2.494e-04 -2.700 0.006960 **
## stat150 -1.036e-04 2.483e-04 -0.417 0.676618
## stat151 -4.171e-05 2.512e-04 -0.166 0.868118
## stat152 -2.308e-04 2.460e-04 -0.938 0.348219
## stat153 1.514e-04 2.515e-04 0.602 0.547318
## stat154 5.313e-05 2.497e-04 0.213 0.831504
## stat155 -6.623e-06 2.470e-04 -0.027 0.978606
## stat156 3.453e-04 2.492e-04 1.385 0.166018
## stat157 -2.453e-04 2.454e-04 -1.000 0.317568
## stat158 -3.004e-04 2.497e-04 -1.203 0.229010
## stat159 9.808e-05 2.462e-04 0.398 0.690421
## stat160 7.423e-05 2.492e-04 0.298 0.765779
## stat161 2.791e-04 2.483e-04 1.124 0.261033
## stat162 5.376e-05 2.464e-04 0.218 0.827282
## stat163 -8.001e-05 2.504e-04 -0.319 0.749365
## stat164 1.557e-04 2.496e-04 0.624 0.532860
## stat165 -4.075e-05 2.452e-04 -0.166 0.868020
## stat166 -1.511e-04 2.445e-04 -0.618 0.536453
## stat167 -8.876e-05 2.469e-04 -0.360 0.719197
## stat168 -1.281e-04 2.473e-04 -0.518 0.604485
## stat169 -4.501e-05 2.464e-04 -0.183 0.855050
## stat170 -2.168e-04 2.490e-04 -0.871 0.383895
## stat171 1.844e-04 2.498e-04 0.738 0.460332
## stat172 3.764e-04 2.464e-04 1.528 0.126684
## stat173 -2.786e-04 2.483e-04 -1.122 0.261949
## stat174 8.121e-05 2.473e-04 0.328 0.742605
## stat175 -3.279e-04 2.476e-04 -1.325 0.185378
## stat176 2.944e-04 2.480e-04 1.187 0.235236
## stat177 1.670e-04 2.491e-04 0.670 0.502681
## stat178 -1.042e-04 2.515e-04 -0.414 0.678713
## stat179 1.432e-04 2.467e-04 0.580 0.561716
## stat180 -2.135e-04 2.461e-04 -0.867 0.385722
## stat181 2.161e-04 2.484e-04 0.870 0.384355
## stat182 9.463e-05 2.488e-04 0.380 0.703756
## stat183 1.118e-04 2.475e-04 0.452 0.651359
## stat184 -1.716e-05 2.496e-04 -0.069 0.945182
## stat185 3.498e-05 2.454e-04 0.143 0.886680
## stat186 -1.207e-04 2.501e-04 -0.483 0.629321
## stat187 -2.228e-04 2.465e-04 -0.904 0.366241
## stat188 2.521e-05 2.483e-04 0.102 0.919146
## stat189 1.050e-04 2.479e-04 0.423 0.671956
## stat190 -7.629e-05 2.457e-04 -0.311 0.756168
## stat191 -4.130e-05 2.489e-04 -0.166 0.868247
## stat192 -1.253e-04 2.490e-04 -0.503 0.614838
## stat193 1.882e-04 2.507e-04 0.751 0.452784
## stat194 -3.007e-06 2.462e-04 -0.012 0.990255
## stat195 2.930e-04 2.478e-04 1.182 0.237067
## stat196 -1.294e-04 2.519e-04 -0.514 0.607526
## stat197 1.821e-04 2.461e-04 0.740 0.459457
## stat198 -4.877e-04 2.478e-04 -1.968 0.049126 *
## stat199 2.593e-04 2.450e-04 1.059 0.289844
## stat200 -2.894e-04 2.448e-04 -1.182 0.237123
## stat201 -6.371e-05 2.468e-04 -0.258 0.796351
## stat202 -2.880e-04 2.505e-04 -1.150 0.250290
## stat203 3.951e-05 2.471e-04 0.160 0.872994
## stat204 -5.295e-04 2.460e-04 -2.152 0.031416 *
## stat205 -3.440e-04 2.454e-04 -1.402 0.160998
## stat206 5.121e-05 2.509e-04 0.204 0.838304
## stat207 3.433e-04 2.479e-04 1.385 0.166155
## stat208 1.579e-05 2.487e-04 0.063 0.949392
## stat209 -1.363e-04 2.466e-04 -0.553 0.580550
## stat210 -2.252e-05 2.483e-04 -0.091 0.927718
## stat211 -1.070e-04 2.484e-04 -0.431 0.666612
## stat212 8.685e-05 2.482e-04 0.350 0.726380
## stat213 -1.692e-04 2.488e-04 -0.680 0.496503
## stat214 -2.501e-04 2.466e-04 -1.014 0.310646
## stat215 -4.070e-05 2.479e-04 -0.164 0.869588
## stat216 -1.334e-04 2.483e-04 -0.537 0.591044
## stat217 5.977e-04 2.480e-04 2.410 0.015980 *
## x18.sqrt 2.728e-02 9.411e-04 28.985 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.03136 on 5343 degrees of freedom
## Multiple R-squared: 0.2782, Adjusted R-squared: 0.2458
## F-statistic: 8.58 on 240 and 5343 DF, p-value: < 2.2e-16
# Influence diagnostics on the full model: plot.diagnostics returns the
# Cook's distance values for the training rows.
cd.full = plot.diagnostics(model=model.full, train=data.train)
## [1] "Number of data points that have Cook's D > 4/n: 287"
## [1] "Number of data points that have Cook's D > 1: 0"
# Row names whose Cook's distance exceeds the common 4/n rule-of-thumb cutoff.
high.cd = names(cd.full[cd.full > 4/nrow(data.train)])
# Drop the high-influence rows and refit. Parentheses clarified: the original
# "!(rownames(data.train)) %in% high.cd" parsed correctly only because %in%
# binds tighter than "!", but read as if "!" applied to rownames().
data.train2 = data.train[!(rownames(data.train) %in% high.cd), ]
model.full2 = lm(formula, data.train2)
# Summary of the refit after removing high-leverage rows (Cook's D > 4/n);
# compare R-squared / residual SE against the first fit above.
summary(model.full2)
##
## Call:
## lm(formula = formula, data = data.train2)
##
## Residuals:
## Min 1Q Median 3Q Max
## -0.057088 -0.017391 -0.002797 0.016268 0.069593
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 1.957e+00 7.788e-03 251.258 < 2e-16 ***
## x1 2.512e-04 5.351e-04 0.469 0.638849
## x2 2.482e-05 3.446e-04 0.072 0.942580
## x3 -2.509e-06 9.384e-05 -0.027 0.978668
## x4 -5.918e-05 7.410e-06 -7.987 1.70e-15 ***
## x5 4.422e-04 2.413e-04 1.832 0.066966 .
## x6 -7.295e-04 4.909e-04 -1.486 0.137317
## x7 1.228e-02 5.260e-04 23.349 < 2e-16 ***
## x8 4.646e-04 1.222e-04 3.803 0.000145 ***
## x9 3.387e-03 2.700e-04 12.543 < 2e-16 ***
## x10 1.316e-03 2.528e-04 5.208 1.99e-07 ***
## x11 2.276e+05 6.048e+04 3.762 0.000170 ***
## x12 1.078e-04 1.536e-04 0.702 0.482755
## x13 6.752e-05 6.175e-05 1.093 0.274259
## x14 -8.899e-05 2.650e-04 -0.336 0.736973
## x15 -4.405e-05 2.526e-04 -0.174 0.861547
## x16 8.543e-04 1.759e-04 4.857 1.23e-06 ***
## x17 1.729e-03 2.669e-04 6.478 1.02e-10 ***
## x19 3.661e-05 1.362e-04 0.269 0.788137
## x20 5.703e-04 9.455e-04 0.603 0.546434
## x21 8.364e-05 3.476e-05 2.406 0.016159 *
## x22 -4.755e-04 2.812e-04 -1.691 0.090985 .
## x23 2.443e-05 2.717e-04 0.090 0.928355
## stat1 -1.446e-04 2.046e-04 -0.707 0.479788
## stat2 2.752e-04 2.020e-04 1.362 0.173245
## stat3 3.042e-04 2.029e-04 1.500 0.133792
## stat4 -2.275e-04 2.036e-04 -1.117 0.264042
## stat5 -2.781e-04 2.035e-04 -1.367 0.171775
## stat6 -4.140e-04 2.038e-04 -2.031 0.042319 *
## stat7 -2.584e-04 2.020e-04 -1.279 0.200851
## stat8 3.550e-04 2.034e-04 1.745 0.081051 .
## stat9 9.938e-06 2.040e-04 0.049 0.961154
## stat10 -1.846e-04 2.019e-04 -0.914 0.360686
## stat11 -5.592e-04 2.058e-04 -2.718 0.006599 **
## stat12 1.639e-04 2.026e-04 0.809 0.418517
## stat13 -2.822e-04 2.030e-04 -1.390 0.164661
## stat14 -1.164e-03 2.018e-04 -5.769 8.46e-09 ***
## stat15 -3.248e-04 2.020e-04 -1.608 0.107926
## stat16 -2.869e-04 2.029e-04 -1.414 0.157441
## stat17 -1.873e-04 2.016e-04 -0.929 0.352928
## stat18 -1.049e-04 2.028e-04 -0.517 0.604883
## stat19 2.640e-05 2.021e-04 0.131 0.896064
## stat20 -7.300e-06 2.040e-04 -0.036 0.971454
## stat21 -3.245e-04 2.038e-04 -1.593 0.111278
## stat22 -3.904e-04 2.030e-04 -1.923 0.054519 .
## stat23 3.224e-04 2.024e-04 1.593 0.111157
## stat24 -4.275e-04 2.042e-04 -2.094 0.036354 *
## stat25 -5.762e-04 2.017e-04 -2.857 0.004292 **
## stat26 -2.657e-04 2.028e-04 -1.310 0.190184
## stat27 -1.143e-04 2.037e-04 -0.561 0.574591
## stat28 8.138e-05 2.042e-04 0.399 0.690234
## stat29 1.949e-04 2.044e-04 0.953 0.340432
## stat30 1.653e-04 2.043e-04 0.809 0.418399
## stat31 1.035e-04 2.070e-04 0.500 0.616990
## stat32 2.931e-05 2.049e-04 0.143 0.886267
## stat33 -4.031e-04 2.034e-04 -1.982 0.047568 *
## stat34 1.316e-04 2.039e-04 0.646 0.518517
## stat35 -5.866e-04 2.037e-04 -2.879 0.004007 **
## stat36 -2.069e-04 2.020e-04 -1.024 0.305897
## stat37 3.079e-05 2.059e-04 0.150 0.881108
## stat38 4.131e-04 2.045e-04 2.020 0.043410 *
## stat39 -3.126e-04 2.019e-04 -1.548 0.121674
## stat40 -3.361e-05 2.040e-04 -0.165 0.869118
## stat41 -5.632e-04 2.019e-04 -2.790 0.005291 **
## stat42 7.221e-05 2.034e-04 0.355 0.722660
## stat43 -3.897e-04 2.041e-04 -1.909 0.056281 .
## stat44 2.309e-04 2.035e-04 1.135 0.256598
## stat45 -9.402e-05 2.040e-04 -0.461 0.644971
## stat46 1.878e-04 2.042e-04 0.920 0.357630
## stat47 2.148e-04 2.045e-04 1.050 0.293811
## stat48 1.337e-04 2.036e-04 0.657 0.511513
## stat49 -1.531e-04 2.017e-04 -0.759 0.447877
## stat50 3.018e-04 2.021e-04 1.493 0.135371
## stat51 3.850e-05 2.034e-04 0.189 0.849847
## stat52 2.545e-05 2.046e-04 0.124 0.901036
## stat53 -2.630e-04 2.063e-04 -1.275 0.202457
## stat54 -4.763e-04 2.051e-04 -2.322 0.020284 *
## stat55 -9.262e-06 2.025e-04 -0.046 0.963516
## stat56 -3.389e-05 2.023e-04 -0.168 0.866972
## stat57 5.695e-05 2.009e-04 0.284 0.776756
## stat58 -6.538e-05 2.011e-04 -0.325 0.745131
## stat59 1.449e-04 2.036e-04 0.712 0.476803
## stat60 6.372e-04 2.035e-04 3.131 0.001749 **
## stat61 -1.928e-04 2.046e-04 -0.942 0.346087
## stat62 -2.734e-04 2.019e-04 -1.354 0.175869
## stat63 1.223e-04 2.040e-04 0.600 0.548806
## stat64 -1.347e-04 2.030e-04 -0.663 0.507141
## stat65 -1.712e-04 2.048e-04 -0.836 0.403115
## stat66 2.916e-04 2.066e-04 1.411 0.158212
## stat67 1.179e-04 2.057e-04 0.573 0.566751
## stat68 -1.341e-05 2.031e-04 -0.066 0.947354
## stat69 -1.027e-04 2.034e-04 -0.505 0.613441
## stat70 3.534e-04 2.028e-04 1.743 0.081397 .
## stat71 2.109e-04 2.023e-04 1.043 0.297199
## stat72 3.526e-04 2.043e-04 1.726 0.084494 .
## stat73 -1.159e-05 2.050e-04 -0.057 0.954915
## stat74 -9.212e-05 2.037e-04 -0.452 0.651092
## stat75 1.036e-04 2.051e-04 0.505 0.613607
## stat76 3.322e-05 2.041e-04 0.163 0.870704
## stat77 3.854e-04 2.028e-04 1.900 0.057454 .
## stat78 -4.244e-04 2.033e-04 -2.088 0.036838 *
## stat79 1.915e-05 2.035e-04 0.094 0.925007
## stat80 3.261e-04 2.057e-04 1.586 0.112874
## stat81 1.590e-05 2.041e-04 0.078 0.937887
## stat82 -1.955e-06 2.034e-04 -0.010 0.992331
## stat83 -1.035e-04 2.034e-04 -0.509 0.610903
## stat84 -4.142e-04 2.020e-04 -2.051 0.040357 *
## stat85 -2.321e-04 2.034e-04 -1.141 0.253833
## stat86 2.971e-04 2.044e-04 1.453 0.146212
## stat87 -8.727e-05 2.056e-04 -0.424 0.671301
## stat88 -1.351e-04 2.022e-04 -0.668 0.504043
## stat89 6.701e-05 2.020e-04 0.332 0.740140
## stat90 -2.255e-04 2.058e-04 -1.096 0.273339
## stat91 -5.316e-04 2.024e-04 -2.627 0.008649 **
## stat92 -3.027e-04 2.038e-04 -1.485 0.137552
## stat93 -1.767e-04 2.056e-04 -0.859 0.390352
## stat94 -3.234e-05 2.036e-04 -0.159 0.873762
## stat95 3.136e-04 2.047e-04 1.532 0.125656
## stat96 -1.757e-04 2.024e-04 -0.868 0.385365
## stat97 1.278e-04 2.024e-04 0.632 0.527731
## stat98 3.401e-03 2.012e-04 16.900 < 2e-16 ***
## stat99 3.498e-04 2.054e-04 1.703 0.088536 .
## stat100 9.058e-04 2.044e-04 4.432 9.54e-06 ***
## stat101 -4.656e-04 2.053e-04 -2.268 0.023366 *
## stat102 2.539e-04 2.058e-04 1.233 0.217457
## stat103 -4.699e-04 2.053e-04 -2.289 0.022114 *
## stat104 -3.835e-04 2.029e-04 -1.890 0.058849 .
## stat105 8.294e-05 2.023e-04 0.410 0.681758
## stat106 -6.678e-04 2.020e-04 -3.306 0.000952 ***
## stat107 -2.684e-04 2.026e-04 -1.325 0.185245
## stat108 -1.106e-04 2.042e-04 -0.542 0.588093
## stat109 -1.015e-04 2.033e-04 -0.499 0.617452
## stat110 -3.351e-03 2.024e-04 -16.555 < 2e-16 ***
## stat111 1.048e-04 2.030e-04 0.516 0.605632
## stat112 -8.812e-05 2.060e-04 -0.428 0.668793
## stat113 -8.414e-05 2.056e-04 -0.409 0.682404
## stat114 5.155e-04 2.046e-04 2.519 0.011789 *
## stat115 2.639e-04 2.034e-04 1.297 0.194529
## stat116 2.502e-04 2.053e-04 1.219 0.223020
## stat117 3.642e-05 2.043e-04 0.178 0.858540
## stat118 2.267e-05 2.017e-04 0.112 0.910513
## stat119 -7.852e-05 2.031e-04 -0.387 0.699108
## stat120 -8.349e-05 2.030e-04 -0.411 0.680840
## stat121 -3.547e-04 2.045e-04 -1.735 0.082846 .
## stat122 -2.360e-04 2.027e-04 -1.164 0.244439
## stat123 1.839e-04 2.052e-04 0.896 0.370113
## stat124 -1.058e-05 2.039e-04 -0.052 0.958611
## stat125 -1.058e-05 2.044e-04 -0.052 0.958723
## stat126 2.445e-04 2.029e-04 1.205 0.228285
## stat127 -2.529e-05 2.020e-04 -0.125 0.900382
## stat128 -3.761e-04 2.033e-04 -1.850 0.064400 .
## stat129 1.775e-04 2.013e-04 0.881 0.378142
## stat130 1.147e-04 2.035e-04 0.563 0.573150
## stat131 4.244e-04 2.037e-04 2.083 0.037302 *
## stat132 -3.206e-05 2.014e-04 -0.159 0.873504
## stat133 4.909e-04 2.045e-04 2.400 0.016415 *
## stat134 -1.871e-04 2.019e-04 -0.927 0.354098
## stat135 -9.208e-05 2.032e-04 -0.453 0.650451
## stat136 -6.145e-05 2.032e-04 -0.302 0.762348
## stat137 1.880e-04 2.036e-04 0.923 0.355932
## stat138 -2.484e-05 2.037e-04 -0.122 0.902937
## stat139 -5.910e-05 2.041e-04 -0.290 0.772201
## stat140 2.063e-04 2.006e-04 1.028 0.303797
## stat141 3.546e-04 2.030e-04 1.747 0.080679 .
## stat142 1.739e-05 2.062e-04 0.084 0.932780
## stat143 1.076e-04 2.032e-04 0.530 0.596390
## stat144 4.323e-04 2.025e-04 2.135 0.032837 *
## stat145 7.268e-05 2.070e-04 0.351 0.725582
## stat146 -4.792e-04 2.046e-04 -2.342 0.019195 *
## stat147 -2.510e-04 2.041e-04 -1.230 0.218935
## stat148 -3.725e-04 2.027e-04 -1.838 0.066129 .
## stat149 -7.092e-04 2.052e-04 -3.456 0.000554 ***
## stat150 -1.447e-04 2.045e-04 -0.707 0.479302
## stat151 1.452e-04 2.071e-04 0.701 0.483110
## stat152 -1.765e-04 2.018e-04 -0.875 0.381737
## stat153 3.669e-04 2.065e-04 1.777 0.075674 .
## stat154 1.906e-04 2.054e-04 0.928 0.353536
## stat155 1.745e-04 2.032e-04 0.859 0.390596
## stat156 2.929e-04 2.043e-04 1.433 0.151780
## stat157 -1.951e-04 2.017e-04 -0.968 0.333302
## stat158 1.622e-05 2.050e-04 0.079 0.936941
## stat159 1.451e-04 2.026e-04 0.716 0.473882
## stat160 4.434e-06 2.052e-04 0.022 0.982760
## stat161 2.975e-04 2.042e-04 1.457 0.145279
## stat162 7.944e-05 2.022e-04 0.393 0.694457
## stat163 2.226e-05 2.064e-04 0.108 0.914134
## stat164 6.296e-05 2.057e-04 0.306 0.759552
## stat165 6.029e-05 2.020e-04 0.298 0.765399
## stat166 -1.353e-04 2.007e-04 -0.674 0.500248
## stat167 -1.738e-04 2.030e-04 -0.856 0.392083
## stat168 -8.012e-05 2.032e-04 -0.394 0.693404
## stat169 -9.090e-05 2.029e-04 -0.448 0.654143
## stat170 1.343e-05 2.051e-04 0.065 0.947796
## stat171 1.028e-05 2.051e-04 0.050 0.960039
## stat172 6.213e-04 2.017e-04 3.080 0.002080 **
## stat173 -1.243e-04 2.044e-04 -0.608 0.543033
## stat174 2.945e-04 2.035e-04 1.447 0.147833
## stat175 -2.518e-04 2.035e-04 -1.237 0.216010
## stat176 6.372e-05 2.037e-04 0.313 0.754460
## stat177 -3.013e-04 2.048e-04 -1.471 0.141306
## stat178 3.579e-05 2.067e-04 0.173 0.862505
## stat179 1.678e-04 2.026e-04 0.828 0.407502
## stat180 -1.450e-04 2.028e-04 -0.715 0.474583
## stat181 3.309e-04 2.042e-04 1.620 0.105207
## stat182 1.298e-04 2.051e-04 0.633 0.526824
## stat183 1.076e-04 2.039e-04 0.528 0.597508
## stat184 1.372e-04 2.050e-04 0.669 0.503349
## stat185 2.122e-04 2.020e-04 1.050 0.293549
## stat186 1.108e-04 2.057e-04 0.539 0.590091
## stat187 -2.669e-04 2.023e-04 -1.320 0.187017
## stat188 9.641e-05 2.039e-04 0.473 0.636405
## stat189 -5.101e-05 2.039e-04 -0.250 0.802438
## stat190 -2.362e-04 2.020e-04 -1.170 0.242250
## stat191 7.140e-05 2.046e-04 0.349 0.727094
## stat192 -8.805e-05 2.049e-04 -0.430 0.667459
## stat193 2.411e-04 2.066e-04 1.167 0.243255
## stat194 -2.465e-04 2.029e-04 -1.215 0.224421
## stat195 8.757e-05 2.040e-04 0.429 0.667745
## stat196 -1.786e-04 2.069e-04 -0.863 0.387945
## stat197 -6.278e-05 2.024e-04 -0.310 0.756424
## stat198 -4.191e-04 2.038e-04 -2.057 0.039774 *
## stat199 2.993e-04 2.012e-04 1.488 0.136882
## stat200 -1.397e-04 2.016e-04 -0.693 0.488344
## stat201 1.226e-04 2.036e-04 0.602 0.547249
## stat202 -2.928e-05 2.055e-04 -0.142 0.886726
## stat203 6.364e-05 2.034e-04 0.313 0.754386
## stat204 -1.960e-04 2.025e-04 -0.968 0.333241
## stat205 -6.713e-05 2.014e-04 -0.333 0.738893
## stat206 -2.677e-05 2.060e-04 -0.130 0.896631
## stat207 4.400e-04 2.041e-04 2.156 0.031117 *
## stat208 1.275e-05 2.051e-04 0.062 0.950422
## stat209 -2.736e-05 2.025e-04 -0.135 0.892506
## stat210 -2.684e-04 2.038e-04 -1.317 0.187895
## stat211 -1.576e-04 2.043e-04 -0.771 0.440720
## stat212 1.020e-04 2.042e-04 0.500 0.617336
## stat213 -2.281e-04 2.045e-04 -1.115 0.264741
## stat214 -7.415e-05 2.032e-04 -0.365 0.715227
## stat215 5.218e-06 2.042e-04 0.026 0.979613
## stat216 -1.822e-04 2.039e-04 -0.894 0.371516
## stat217 5.060e-04 2.036e-04 2.485 0.012979 *
## x18.sqrt 2.739e-02 7.714e-04 35.509 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.02508 on 5056 degrees of freedom
## Multiple R-squared: 0.3858, Adjusted R-squared: 0.3566
## F-statistic: 13.23 on 240 and 5056 DF, p-value: < 2.2e-16
# Re-run influence diagnostics on the refit model. Arguments named for
# consistency with the earlier plot.diagnostics(model=, train=) call.
cd.full2 = plot.diagnostics(model=model.full2, train=data.train2)
## [1] "Number of data points that have Cook's D > 4/n: 278"
## [1] "Number of data points that have Cook's D > 1: 0"
# much more normal residuals than before.
# Checking to see if distributions are different and if so which variables
# High Leverage Plot: compare the target distribution of high-Cook's-D rows
# ('High') against the remaining rows ('Normal').
plotData = data.train %>%
  rownames_to_column() %>%
  mutate(type=ifelse(rowname %in% high.cd,'High','Normal')) %>%
  dplyr::select(type,target=one_of(label.names))
ggplot(data=plotData, aes(x=type,y=target)) +
  geom_boxplot(fill='light blue',outlier.shape=NA) +
  # 'labels' spelled out (was 'label', which relied on partial argument matching)
  scale_y_continuous(name="Target Variable Values",labels=scales::comma_format(accuracy=.1)) +
  theme_light() +
  ggtitle('Distribution of High Leverage Points and Normal Points')
# 2 sample t-tests: for each feature, test whether its mean differs between
# the high-leverage ('High') rows and the remaining ('Normal') rows.
plotData = data.train %>%
  rownames_to_column() %>%
  mutate(type=ifelse(rowname %in% high.cd,'High','Normal')) %>%
  dplyr::select(type,one_of(feature.names))
comp.test = lapply(dplyr::select(plotData, one_of(feature.names))
                   , function(x) t.test(x ~ plotData$type, var.equal = TRUE))
# Keep only the features significant at the 5% level.
sig.comp = list.filter(comp.test, p.value < 0.05)
# vapply (was sapply) guarantees a named numeric vector even when sig.comp
# is empty, instead of silently degrading to list().
vapply(sig.comp, function(x) x[['p.value']], numeric(1))
## x8 stat67 stat74 stat85 stat98 stat110 stat128 stat146 stat158
## 2.099168e-02 2.117154e-02 3.350453e-03 4.077616e-02 7.774182e-07 5.460697e-05 4.454263e-03 1.118930e-03 2.801586e-02
## stat177 stat214 x18.sqrt
## 3.671062e-02 4.575547e-02 5.254017e-03
# Faceted boxplots restricted to the features flagged significant above.
mm = melt(plotData, id=c('type')) %>% filter(variable %in% names(sig.comp))
ggplot(mm,aes(x=type, y=value)) +
  geom_boxplot()+
  facet_wrap(~variable, ncol=5, scales = 'free_y') +
  # 'labels' spelled out (was 'label', which relied on partial argument matching)
  scale_y_continuous(name="values",labels=scales::comma_format(accuracy=.1)) +
  ggtitle('Distribution of High Leverage Points and Normal Points')
# Distribution (box) Plots
# Same High-vs-Normal comparison across ALL features, not just the significant ones.
mm = melt(plotData, id=c('type'))
ggplot(mm,aes(x=type, y=value)) +
  geom_boxplot()+
  facet_wrap(~variable, ncol=8, scales = 'free_y') +
  # 'labels' spelled out (was 'label', which relied on partial argument matching)
  scale_y_continuous(name="values",labels=scales::comma_format(accuracy=.1)) +
  ggtitle('Distribution of High Leverage Points and Normal Points')
# Intercept-only (grand mean) baseline model, for comparison against the
# full fits above.
model.null = lm(grand.mean.formula, data.train)
summary(model.null)
##
## Call:
## lm(formula = grand.mean.formula, data = data.train)
##
## Residuals:
## Min 1Q Median 3Q Max
## -0.10993 -0.02403 -0.00310 0.02066 0.16460
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 2.0965126 0.0004832 4339 <2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.0361 on 5583 degrees of freedom
References — basic stepwise regression: http://www.stat.columbia.edu/~martin/W2024/R10.pdf ; cross-validation and other model-selection metrics: http://www.sthda.com/english/articles/37-model-selection-essentials-in-r/154-stepwise-regression-essentials-in-r/
# Forward selection via caret (method "leapForward"), gated by the notebook
# parameter. "== TRUE" dropped: algo.forward.caret is already a logical flag
# (see params str() at the top of the document).
if (algo.forward.caret){
  set.seed(1)  # reproducible CV folds
  returned = train.caret.glmselect(formula = formula
                                   , data = data.train
                                   , method = "leapForward"
                                   , feature.names = feature.names)
  model.forward = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 12 on full training set
## [1] "All models results"
## nvmax RMSE Rsquared MAE RMSESD RsquaredSD MAESD
## 1 1 0.03395310 0.1165510 0.02647398 0.0005979859 0.02327287 0.0005150109
## 2 2 0.03311925 0.1591907 0.02574789 0.0006352398 0.02370837 0.0005936152
## 3 3 0.03252663 0.1890502 0.02520706 0.0006234383 0.02367460 0.0005685038
## 4 4 0.03200521 0.2149257 0.02451286 0.0006236591 0.02426557 0.0006086824
## 5 5 0.03172206 0.2288497 0.02431828 0.0005121533 0.01962112 0.0005318708
## 6 6 0.03162055 0.2337592 0.02423701 0.0005501750 0.01958096 0.0005801727
## 7 7 0.03155536 0.2368763 0.02420893 0.0005463424 0.01815846 0.0005281206
## 8 8 0.03155004 0.2371337 0.02421475 0.0005328551 0.01824169 0.0005201855
## 9 9 0.03151227 0.2390118 0.02418230 0.0005701987 0.01953932 0.0005480818
## 10 10 0.03151991 0.2386407 0.02418932 0.0005934613 0.02050606 0.0005517946
## 11 11 0.03148826 0.2401233 0.02416840 0.0005839803 0.02024220 0.0005396992
## 12 12 0.03144277 0.2423114 0.02413016 0.0006089127 0.02045617 0.0005421805
## 13 13 0.03144593 0.2421064 0.02412569 0.0005595793 0.01829170 0.0004961523
## 14 14 0.03147217 0.2408524 0.02413358 0.0005546584 0.01777328 0.0004757058
## 15 15 0.03146269 0.2412932 0.02412038 0.0005699604 0.01794180 0.0004964014
## 16 16 0.03146471 0.2412243 0.02413481 0.0005822647 0.01826623 0.0005096920
## 17 17 0.03148108 0.2404943 0.02413768 0.0005893673 0.01807734 0.0005078095
## 18 18 0.03147515 0.2408168 0.02413956 0.0006073607 0.01890258 0.0005080548
## 19 19 0.03149985 0.2396299 0.02416492 0.0005785946 0.01721525 0.0004851554
## 20 20 0.03152458 0.2385580 0.02418177 0.0005823249 0.01733547 0.0005049697
## 21 21 0.03154384 0.2376352 0.02419409 0.0005528740 0.01593001 0.0005103997
## 22 22 0.03155952 0.2369302 0.02421275 0.0005660854 0.01630714 0.0005193433
## 23 23 0.03156639 0.2365991 0.02421827 0.0005658361 0.01662496 0.0005131652
## 24 24 0.03158999 0.2354960 0.02424588 0.0005255184 0.01470465 0.0004703101
## 25 25 0.03159680 0.2352043 0.02425196 0.0005229659 0.01490439 0.0004823987
## 26 26 0.03161106 0.2345785 0.02427067 0.0005133071 0.01432394 0.0004680176
## 27 27 0.03161888 0.2342380 0.02427384 0.0005103039 0.01390706 0.0004630683
## 28 28 0.03163323 0.2336016 0.02428663 0.0005136425 0.01436017 0.0004695734
## 29 29 0.03163760 0.2333876 0.02428353 0.0005096944 0.01385728 0.0004701043
## 30 30 0.03166526 0.2321413 0.02430871 0.0005173133 0.01431754 0.0004666726
## 31 31 0.03167516 0.2316794 0.02431825 0.0005271362 0.01436181 0.0004776827
## 32 32 0.03168470 0.2312574 0.02432051 0.0005155463 0.01372260 0.0004720500
## 33 33 0.03168596 0.2312338 0.02432760 0.0005052891 0.01344898 0.0004743213
## 34 34 0.03170226 0.2304999 0.02435174 0.0005074910 0.01335099 0.0004707768
## 35 35 0.03169112 0.2310189 0.02435180 0.0004876658 0.01245062 0.0004592911
## 36 36 0.03169817 0.2307173 0.02435985 0.0004775096 0.01170234 0.0004425249
## 37 37 0.03169768 0.2307591 0.02435230 0.0004769222 0.01187272 0.0004475476
## 38 38 0.03170422 0.2304759 0.02434784 0.0004784050 0.01236284 0.0004553112
## 39 39 0.03169900 0.2306850 0.02433946 0.0004640654 0.01201197 0.0004485276
## 40 40 0.03170959 0.2302284 0.02434684 0.0004656190 0.01244136 0.0004454247
## 41 41 0.03172476 0.2296067 0.02435336 0.0004667597 0.01296943 0.0004662084
## 42 42 0.03173852 0.2290101 0.02436404 0.0004554085 0.01259913 0.0004706533
## 43 43 0.03174763 0.2286087 0.02437636 0.0004605894 0.01289273 0.0004686942
## 44 44 0.03175939 0.2281420 0.02438737 0.0004764387 0.01335237 0.0004758079
## 45 45 0.03176403 0.2279222 0.02439415 0.0004795397 0.01353388 0.0004827006
## 46 46 0.03177560 0.2274428 0.02441008 0.0004810602 0.01322332 0.0004740892
## 47 47 0.03177915 0.2272811 0.02440784 0.0004738386 0.01294774 0.0004691628
## 48 48 0.03177735 0.2273874 0.02440948 0.0004621807 0.01286290 0.0004670241
## 49 49 0.03178886 0.2269115 0.02442010 0.0004738698 0.01354676 0.0004775241
## 50 50 0.03178674 0.2270401 0.02441758 0.0004732606 0.01370713 0.0004786989
## 51 51 0.03180123 0.2264267 0.02442759 0.0004737043 0.01371125 0.0004939056
## 52 52 0.03179972 0.2265051 0.02442584 0.0004616387 0.01324100 0.0004899591
## 53 53 0.03180317 0.2263372 0.02443887 0.0004598072 0.01297975 0.0004870148
## 54 54 0.03180214 0.2264160 0.02443718 0.0004606013 0.01327592 0.0004761625
## 55 55 0.03181308 0.2259495 0.02444254 0.0004558164 0.01322292 0.0004719684
## 56 56 0.03182918 0.2252072 0.02445364 0.0004485154 0.01277731 0.0004730278
## 57 57 0.03184282 0.2246656 0.02446749 0.0004334029 0.01225466 0.0004640065
## 58 58 0.03184715 0.2245331 0.02446668 0.0004421465 0.01224454 0.0004634234
## 59 59 0.03184034 0.2248748 0.02446845 0.0004405915 0.01205376 0.0004572058
## 60 60 0.03183953 0.2248877 0.02446792 0.0004384121 0.01165394 0.0004594912
## 61 61 0.03184332 0.2247287 0.02447804 0.0004298913 0.01108106 0.0004535169
## 62 62 0.03183450 0.2251372 0.02447205 0.0004368759 0.01154938 0.0004713565
## 63 63 0.03183354 0.2252314 0.02447122 0.0004356239 0.01169205 0.0004603636
## 64 64 0.03183802 0.2250486 0.02447508 0.0004271380 0.01158192 0.0004584795
## 65 65 0.03183450 0.2252077 0.02447937 0.0004223809 0.01145612 0.0004598576
## 66 66 0.03185004 0.2245405 0.02449921 0.0004212396 0.01113369 0.0004596119
## 67 67 0.03184922 0.2245838 0.02450020 0.0004139884 0.01071872 0.0004557884
## 68 68 0.03185558 0.2243229 0.02450717 0.0004128238 0.01065996 0.0004582725
## 69 69 0.03185539 0.2244190 0.02449670 0.0004169911 0.01077948 0.0004669268
## 70 70 0.03185924 0.2242715 0.02450587 0.0004059740 0.01041746 0.0004671327
## 71 71 0.03187290 0.2237093 0.02451956 0.0004219245 0.01088320 0.0004710732
## 72 72 0.03187298 0.2237404 0.02452303 0.0004256145 0.01101298 0.0004717053
## 73 73 0.03187957 0.2234953 0.02453094 0.0004252274 0.01104343 0.0004761809
## 74 74 0.03187966 0.2234912 0.02453705 0.0004320580 0.01140109 0.0004783732
## 75 75 0.03188271 0.2233591 0.02454342 0.0004379869 0.01179732 0.0004823819
## 76 76 0.03188553 0.2232078 0.02454993 0.0004230999 0.01150104 0.0004777281
## 77 77 0.03189781 0.2227561 0.02455777 0.0004288067 0.01177278 0.0004911970
## 78 78 0.03190299 0.2225528 0.02455162 0.0004313009 0.01201515 0.0004930560
## 79 79 0.03191706 0.2219676 0.02456719 0.0004344594 0.01215894 0.0004951524
## 80 80 0.03191458 0.2221217 0.02456577 0.0004338667 0.01214463 0.0005033395
## 81 81 0.03192033 0.2219051 0.02456439 0.0004313690 0.01206774 0.0004930222
## 82 82 0.03191424 0.2222243 0.02455970 0.0004385719 0.01182889 0.0004971302
## 83 83 0.03190928 0.2224570 0.02455892 0.0004255322 0.01149986 0.0004871277
## 84 84 0.03191755 0.2221314 0.02457052 0.0004232839 0.01117868 0.0004862624
## 85 85 0.03191502 0.2223054 0.02456890 0.0004211698 0.01143577 0.0004787257
## 86 86 0.03191369 0.2223950 0.02457104 0.0004284746 0.01167188 0.0004820153
## 87 87 0.03192401 0.2219614 0.02457736 0.0004267010 0.01189305 0.0004923923
## 88 88 0.03192809 0.2218327 0.02457636 0.0004216408 0.01208431 0.0004861794
## 89 89 0.03192496 0.2220101 0.02457205 0.0004191347 0.01204861 0.0004817634
## 90 90 0.03192372 0.2220988 0.02456924 0.0004239662 0.01237410 0.0004968204
## 91 91 0.03192057 0.2223211 0.02456627 0.0004196135 0.01244394 0.0004991157
## 92 92 0.03191530 0.2225472 0.02456019 0.0004183271 0.01219951 0.0004891754
## 93 93 0.03191525 0.2225446 0.02456133 0.0004208912 0.01216660 0.0004976851
## 94 94 0.03192166 0.2222474 0.02456481 0.0004196560 0.01199398 0.0004984875
## 95 95 0.03192419 0.2221541 0.02456745 0.0004232848 0.01215541 0.0004963700
## 96 96 0.03193362 0.2217597 0.02457194 0.0004201043 0.01202102 0.0004941792
## 97 97 0.03194181 0.2214418 0.02457978 0.0004173347 0.01211418 0.0004947950
## 98 98 0.03195221 0.2210132 0.02459481 0.0004175411 0.01195069 0.0004940557
## 99 99 0.03195340 0.2209655 0.02459048 0.0004130466 0.01144812 0.0004856121
## 100 100 0.03196131 0.2206612 0.02459209 0.0004115659 0.01156731 0.0004873460
## 101 101 0.03195895 0.2207842 0.02458702 0.0004195615 0.01161447 0.0004908334
## 102 102 0.03196284 0.2206291 0.02458965 0.0004269593 0.01208315 0.0004843883
## 103 103 0.03195155 0.2211550 0.02457704 0.0004294558 0.01194471 0.0004835014
## 104 104 0.03195727 0.2209238 0.02458284 0.0004262864 0.01201741 0.0004852471
## 105 105 0.03196648 0.2205133 0.02459340 0.0004284747 0.01229800 0.0004918014
## 106 106 0.03196671 0.2204789 0.02459488 0.0004234854 0.01215901 0.0004849204
## 107 107 0.03196666 0.2204595 0.02459905 0.0004207627 0.01204928 0.0004780259
## 108 108 0.03196283 0.2206418 0.02459403 0.0004172090 0.01199478 0.0004819830
## 109 109 0.03195375 0.2210648 0.02458743 0.0004256806 0.01249521 0.0004927351
## 110 110 0.03196173 0.2207523 0.02459172 0.0004329518 0.01250046 0.0004942464
## 111 111 0.03196470 0.2206294 0.02459073 0.0004317743 0.01254127 0.0004956768
## 112 112 0.03196141 0.2207754 0.02458459 0.0004318349 0.01264790 0.0004916967
## 113 113 0.03196259 0.2207363 0.02458115 0.0004330002 0.01271557 0.0004941072
## 114 114 0.03195624 0.2210359 0.02458176 0.0004439094 0.01315442 0.0004959435
## 115 115 0.03196558 0.2206297 0.02458863 0.0004433333 0.01303437 0.0004829788
## 116 116 0.03196894 0.2204872 0.02458861 0.0004423455 0.01294523 0.0004856856
## 117 117 0.03197619 0.2201545 0.02459636 0.0004345137 0.01268478 0.0004835922
## 118 118 0.03197450 0.2202546 0.02459413 0.0004417386 0.01290968 0.0004821785
## 119 119 0.03197755 0.2201050 0.02459570 0.0004326417 0.01273772 0.0004802960
## 120 120 0.03197897 0.2200545 0.02460166 0.0004200477 0.01240690 0.0004787944
## 121 121 0.03198345 0.2199088 0.02460018 0.0004250412 0.01261212 0.0004842790
## 122 122 0.03199256 0.2195512 0.02460871 0.0004199323 0.01260969 0.0004846595
## 123 123 0.03200091 0.2191705 0.02461751 0.0004185160 0.01267224 0.0004803541
## 124 124 0.03199880 0.2192748 0.02461573 0.0004149147 0.01275911 0.0004764288
## 125 125 0.03199581 0.2194230 0.02461190 0.0004089477 0.01260253 0.0004717770
## 126 126 0.03199980 0.2192350 0.02461378 0.0003986790 0.01235094 0.0004625450
## 127 127 0.03199913 0.2192525 0.02461726 0.0003926853 0.01210645 0.0004621815
## 128 128 0.03200112 0.2191827 0.02461896 0.0003916793 0.01213189 0.0004593878
## 129 129 0.03199869 0.2193024 0.02462074 0.0003958458 0.01216789 0.0004635530
## 130 130 0.03199885 0.2193160 0.02461935 0.0003983767 0.01228160 0.0004646664
## 131 131 0.03200172 0.2192042 0.02462345 0.0003976480 0.01237453 0.0004644656
## 132 132 0.03200145 0.2192112 0.02462228 0.0003957050 0.01222040 0.0004593567
## 133 133 0.03200066 0.2192564 0.02462184 0.0004006978 0.01233184 0.0004639605
## 134 134 0.03200419 0.2190889 0.02462600 0.0004043448 0.01230815 0.0004699874
## 135 135 0.03200624 0.2190143 0.02462917 0.0004031750 0.01244296 0.0004757191
## 136 136 0.03200580 0.2190514 0.02462676 0.0004028184 0.01237139 0.0004724673
## 137 137 0.03200399 0.2191243 0.02462183 0.0004043963 0.01235839 0.0004738249
## 138 138 0.03201145 0.2188452 0.02462667 0.0004045099 0.01233789 0.0004721009
## 139 139 0.03201013 0.2189028 0.02462685 0.0004066063 0.01248073 0.0004787351
## 140 140 0.03200957 0.2189313 0.02462563 0.0004061626 0.01235894 0.0004794859
## 141 141 0.03201551 0.2186889 0.02462928 0.0004083552 0.01236020 0.0004816451
## 142 142 0.03202137 0.2184263 0.02463773 0.0004102059 0.01213833 0.0004824374
## 143 143 0.03201797 0.2185852 0.02463506 0.0004141303 0.01222100 0.0004829585
## 144 144 0.03202222 0.2184019 0.02464031 0.0004165780 0.01223153 0.0004863700
## 145 145 0.03202375 0.2183183 0.02464417 0.0004099083 0.01192225 0.0004792128
## 146 146 0.03202417 0.2183371 0.02464376 0.0004101688 0.01222218 0.0004835274
## 147 147 0.03202233 0.2184189 0.02464111 0.0004097381 0.01225586 0.0004821929
## 148 148 0.03202623 0.2182385 0.02464608 0.0004069275 0.01213402 0.0004774815
## 149 149 0.03202041 0.2185037 0.02463846 0.0004094465 0.01236256 0.0004815277
## 150 150 0.03202004 0.2185247 0.02463644 0.0004097893 0.01231326 0.0004820157
## 151 151 0.03202065 0.2184927 0.02463585 0.0004136082 0.01234672 0.0004865664
## 152 152 0.03202226 0.2184224 0.02464184 0.0004171155 0.01238273 0.0004867587
## 153 153 0.03202098 0.2184655 0.02464121 0.0004161408 0.01231534 0.0004892888
## 154 154 0.03201736 0.2186147 0.02463977 0.0004152349 0.01236594 0.0004943067
## 155 155 0.03201398 0.2187811 0.02463369 0.0004130180 0.01229778 0.0004903634
## 156 156 0.03201823 0.2186124 0.02463581 0.0004076306 0.01217950 0.0004854505
## 157 157 0.03202270 0.2184235 0.02464108 0.0004046583 0.01221723 0.0004831594
## 158 158 0.03202396 0.2183805 0.02464058 0.0004069901 0.01224242 0.0004814661
## 159 159 0.03202436 0.2184022 0.02464197 0.0004092393 0.01249930 0.0004820512
## 160 160 0.03202715 0.2183064 0.02464115 0.0004101134 0.01263529 0.0004833841
## 161 161 0.03202687 0.2183362 0.02464304 0.0004116264 0.01271263 0.0004787485
## 162 162 0.03202551 0.2183751 0.02464176 0.0004048946 0.01235467 0.0004783706
## 163 163 0.03202513 0.2184060 0.02464270 0.0004066129 0.01235073 0.0004792755
## 164 164 0.03202235 0.2185315 0.02464144 0.0004098026 0.01237531 0.0004798566
## 165 165 0.03202907 0.2182541 0.02464843 0.0004096008 0.01246158 0.0004790820
## 166 166 0.03202349 0.2185066 0.02464277 0.0004066953 0.01248611 0.0004812808
## 167 167 0.03202366 0.2184914 0.02464556 0.0004077478 0.01249389 0.0004809971
## 168 168 0.03202602 0.2184152 0.02464955 0.0004032509 0.01245701 0.0004783182
## 169 169 0.03202757 0.2183740 0.02465139 0.0004055371 0.01255639 0.0004820980
## 170 170 0.03202583 0.2184589 0.02464788 0.0003995972 0.01242639 0.0004792175
## 171 171 0.03202573 0.2184636 0.02464812 0.0004007060 0.01248566 0.0004785376
## 172 172 0.03202744 0.2183824 0.02464924 0.0004027685 0.01239713 0.0004816064
## 173 173 0.03202620 0.2184307 0.02464815 0.0003998428 0.01221133 0.0004778987
## 174 174 0.03202656 0.2184248 0.02464724 0.0003995446 0.01231772 0.0004792775
## 175 175 0.03202956 0.2183048 0.02465050 0.0004024071 0.01247359 0.0004802335
## 176 176 0.03202746 0.2184136 0.02464741 0.0004038045 0.01255782 0.0004833052
## 177 177 0.03203113 0.2182648 0.02464919 0.0004041137 0.01268905 0.0004860247
## 178 178 0.03202786 0.2184197 0.02464497 0.0004069565 0.01296123 0.0004875360
## 179 179 0.03203370 0.2181785 0.02464913 0.0004100965 0.01314584 0.0004907344
## 180 180 0.03203363 0.2181793 0.02464923 0.0004097293 0.01328810 0.0004910026
## 181 181 0.03203262 0.2182431 0.02464978 0.0004123029 0.01338778 0.0004918606
## 182 182 0.03203145 0.2182997 0.02464748 0.0004123559 0.01331811 0.0004926006
## 183 183 0.03203168 0.2182958 0.02465036 0.0004132944 0.01345086 0.0004915900
## 184 184 0.03203036 0.2183487 0.02465168 0.0004115151 0.01345545 0.0004920568
## 185 185 0.03203350 0.2182263 0.02465349 0.0004089793 0.01348104 0.0004894535
## 186 186 0.03203672 0.2180950 0.02465680 0.0004122775 0.01360558 0.0004916226
## 187 187 0.03203811 0.2180390 0.02465873 0.0004112225 0.01359375 0.0004913361
## 188 188 0.03203738 0.2180546 0.02465817 0.0004109635 0.01362897 0.0004905349
## 189 189 0.03203759 0.2180426 0.02465919 0.0004119832 0.01360466 0.0004893970
## 190 190 0.03203737 0.2180728 0.02465607 0.0004126104 0.01375357 0.0004910216
## 191 191 0.03203901 0.2180021 0.02465887 0.0004115985 0.01369966 0.0004909803
## 192 192 0.03203902 0.2180063 0.02465844 0.0004125217 0.01371250 0.0004911413
## 193 193 0.03204134 0.2179064 0.02466164 0.0004108572 0.01371604 0.0004920245
## 194 194 0.03203870 0.2180177 0.02466026 0.0004081080 0.01359524 0.0004913485
## 195 195 0.03203870 0.2180386 0.02465988 0.0004085829 0.01367994 0.0004914215
## 196 196 0.03203796 0.2180648 0.02466083 0.0004073218 0.01366825 0.0004886111
## 197 197 0.03203945 0.2180111 0.02466239 0.0004054981 0.01364913 0.0004875895
## 198 198 0.03204257 0.2178645 0.02466503 0.0004032203 0.01357329 0.0004858109
## 199 199 0.03204175 0.2179022 0.02466438 0.0004061120 0.01366066 0.0004868459
## 200 200 0.03204325 0.2178473 0.02466500 0.0004070171 0.01371551 0.0004876508
## 201 201 0.03204245 0.2178859 0.02466378 0.0004091036 0.01370401 0.0004897748
## 202 202 0.03204254 0.2178968 0.02466384 0.0004126305 0.01391193 0.0004929120
## 203 203 0.03204375 0.2178490 0.02466528 0.0004136179 0.01397308 0.0004929948
## 204 204 0.03204379 0.2178626 0.02466640 0.0004161424 0.01405752 0.0004950381
## 205 205 0.03204187 0.2179528 0.02466350 0.0004156100 0.01409997 0.0004946321
## 206 206 0.03204384 0.2178641 0.02466501 0.0004165036 0.01414663 0.0004947564
## 207 207 0.03204526 0.2177972 0.02466537 0.0004137923 0.01396160 0.0004902230
## 208 208 0.03204458 0.2178226 0.02466569 0.0004122505 0.01394233 0.0004894946
## 209 209 0.03204469 0.2178272 0.02466659 0.0004124110 0.01399642 0.0004896656
## 210 210 0.03204371 0.2178649 0.02466620 0.0004132771 0.01403759 0.0004912000
## 211 211 0.03204358 0.2178735 0.02466558 0.0004141221 0.01411463 0.0004929423
## 212 212 0.03204335 0.2178848 0.02466556 0.0004147198 0.01414212 0.0004930190
## 213 213 0.03204205 0.2179355 0.02466485 0.0004162104 0.01416073 0.0004926137
## 214 214 0.03204192 0.2179498 0.02466390 0.0004168656 0.01422317 0.0004931778
## 215 215 0.03204168 0.2179653 0.02466405 0.0004171961 0.01425271 0.0004931760
## 216 216 0.03204231 0.2179339 0.02466461 0.0004171828 0.01429500 0.0004937500
## 217 217 0.03204409 0.2178578 0.02466632 0.0004166875 0.01427576 0.0004938043
## 218 218 0.03204360 0.2178781 0.02466638 0.0004178487 0.01431699 0.0004942786
## 219 219 0.03204414 0.2178556 0.02466685 0.0004176249 0.01434055 0.0004947478
## 220 220 0.03204468 0.2178355 0.02466744 0.0004178702 0.01434112 0.0004946425
## 221 221 0.03204496 0.2178199 0.02466793 0.0004171791 0.01430380 0.0004933065
## 222 222 0.03204598 0.2177757 0.02466913 0.0004172846 0.01431543 0.0004930348
## 223 223 0.03204660 0.2177524 0.02466984 0.0004165990 0.01429830 0.0004934110
## 224 224 0.03204693 0.2177398 0.02466967 0.0004169454 0.01432580 0.0004942027
## 225 225 0.03204576 0.2177908 0.02466855 0.0004179333 0.01435635 0.0004946893
## 226 226 0.03204662 0.2177579 0.02466911 0.0004179622 0.01437743 0.0004941819
## 227 227 0.03204675 0.2177498 0.02466934 0.0004178367 0.01435315 0.0004932246
## 228 228 0.03204672 0.2177517 0.02466905 0.0004173251 0.01433745 0.0004927076
## 229 229 0.03204632 0.2177666 0.02466890 0.0004174308 0.01433346 0.0004934692
## 230 230 0.03204622 0.2177733 0.02466865 0.0004174209 0.01434463 0.0004934204
## 231 231 0.03204718 0.2177334 0.02466928 0.0004175185 0.01437623 0.0004934526
## 232 232 0.03204717 0.2177352 0.02466906 0.0004179389 0.01439881 0.0004939515
## 233 233 0.03204684 0.2177497 0.02466872 0.0004174698 0.01437902 0.0004935217
## 234 234 0.03204684 0.2177513 0.02466836 0.0004177045 0.01439358 0.0004936237
## 235 235 0.03204685 0.2177523 0.02466836 0.0004174856 0.01438837 0.0004937148
## 236 236 0.03204686 0.2177515 0.02466831 0.0004174937 0.01438594 0.0004936900
## 237 237 0.03204697 0.2177468 0.02466842 0.0004172423 0.01438079 0.0004934998
## 238 238 0.03204699 0.2177449 0.02466846 0.0004171936 0.01437642 0.0004936864
## 239 239 0.03204712 0.2177394 0.02466853 0.0004172112 0.01437343 0.0004935919
## 240 240 0.03204715 0.2177384 0.02466849 0.0004172579 0.01437592 0.0004936210
## [1] "Best Model"
## nvmax
## 12 12
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## [1] "Coefficients of final model:"
## Estimate 2.5 % 97.5 %
## (Intercept) 1.997103e+00 1.9906386244 2.003568e+00
## x4 -5.427583e-05 -0.0000716205 -3.693116e-05
## x7 1.122390e-02 0.0099972365 1.245057e-02
## x9 3.330616e-03 0.0026964717 3.964760e-03
## x10 9.708542e-04 0.0003799704 1.561738e-03
## x16 8.621833e-04 0.0004505316 1.273835e-03
## x17 1.587577e-03 0.0009649579 2.210196e-03
## stat14 -8.668805e-04 -0.0013371562 -3.966047e-04
## stat98 3.548025e-03 0.0030790242 4.017027e-03
## stat100 8.029460e-04 0.0003257120 1.280180e-03
## stat106 -8.063776e-04 -0.0012800279 -3.327274e-04
## stat110 -3.375726e-03 -0.0038506405 -2.900812e-03
## x18.sqrt 2.705501e-02 0.0252462261 2.886379e-02
# Evaluate the forward-selection (leapForward) model on the held-out test set.
# isTRUE() is safer than `== TRUE`: it returns FALSE (instead of NA, which
# would make `if` error) when the flag is missing or NA.
if (isTRUE(algo.forward.caret)) {
  test.model(model = model.forward, test = data.test
             ,method = 'leapForward', subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,id = id
             # NOTE(review): `t` is presumably a transformation object defined
             # earlier in the report (not base::t) — confirm upstream chunk.
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 2.040 2.085 2.098 2.098 2.111 2.149
## [1] "leapForward Test MSE: 0.00105272538908082"
# Train the backward-elimination (leapBackward) model via the project helper
# train.caret.glmselect, which returns the fitted caret model and the CV fold id.
# isTRUE() avoids an `if` error should the flag be NA/missing (`== TRUE` yields NA).
if (isTRUE(algo.backward.caret)) {
  set.seed(1)  # reproducible CV folds, matching the other selection methods
  returned = train.caret.glmselect(formula = formula
                                   ,data = data.train
                                   ,method = "leapBackward"
                                   ,feature.names = feature.names)
  model.backward = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 12 on full training set
## [1] "All models results"
## nvmax RMSE Rsquared MAE RMSESD RsquaredSD MAESD
## 1 1 0.03395310 0.1165510 0.02647398 0.0005979859 0.023272866 0.0005150109
## 2 2 0.03311925 0.1591907 0.02574789 0.0006352398 0.023708368 0.0005936152
## 3 3 0.03252663 0.1890502 0.02520706 0.0006234383 0.023674604 0.0005685038
## 4 4 0.03200521 0.2149257 0.02451286 0.0006236591 0.024265568 0.0006086824
## 5 5 0.03172206 0.2288497 0.02431828 0.0005121533 0.019621124 0.0005318708
## 6 6 0.03162055 0.2337592 0.02423701 0.0005501750 0.019580959 0.0005801727
## 7 7 0.03155536 0.2368763 0.02420893 0.0005463424 0.018158457 0.0005281206
## 8 8 0.03155004 0.2371337 0.02421475 0.0005328551 0.018241687 0.0005201855
## 9 9 0.03151227 0.2390118 0.02418230 0.0005701987 0.019539325 0.0005480818
## 10 10 0.03152160 0.2385622 0.02419215 0.0005938655 0.020447686 0.0005521268
## 11 11 0.03148801 0.2401440 0.02417114 0.0005839110 0.020256690 0.0005399899
## 12 12 0.03144277 0.2423114 0.02413016 0.0006089127 0.020456168 0.0005421805
## 13 13 0.03144593 0.2421064 0.02412569 0.0005595793 0.018291703 0.0004961523
## 14 14 0.03147217 0.2408524 0.02413358 0.0005546584 0.017773284 0.0004757058
## 15 15 0.03146269 0.2412932 0.02412038 0.0005699604 0.017941795 0.0004964014
## 16 16 0.03146471 0.2412243 0.02413481 0.0005822647 0.018266232 0.0005096920
## 17 17 0.03148108 0.2404943 0.02413768 0.0005893673 0.018077338 0.0005078095
## 18 18 0.03146560 0.2412452 0.02413348 0.0005997718 0.018528265 0.0004986066
## 19 19 0.03149762 0.2397434 0.02416346 0.0005787384 0.017208467 0.0004946583
## 20 20 0.03152209 0.2386842 0.02418103 0.0005837738 0.017425685 0.0005165697
## 21 21 0.03154521 0.2375805 0.02419415 0.0005531043 0.015933179 0.0005103523
## 22 22 0.03155348 0.2372139 0.02420674 0.0005655556 0.016327113 0.0005249120
## 23 23 0.03156183 0.2368220 0.02421618 0.0005652263 0.016624459 0.0005150889
## 24 24 0.03158145 0.2358966 0.02424040 0.0005244432 0.014707452 0.0004766699
## 25 25 0.03159438 0.2353268 0.02424901 0.0005317027 0.014909406 0.0004852912
## 26 26 0.03159943 0.2351187 0.02426318 0.0005384239 0.014519764 0.0004759744
## 27 27 0.03160490 0.2348844 0.02426275 0.0005268802 0.014219841 0.0004721331
## 28 28 0.03163405 0.2335564 0.02428858 0.0005319487 0.014450387 0.0004780203
## 29 29 0.03165139 0.2327398 0.02430130 0.0005204871 0.014067055 0.0004743553
## 30 30 0.03167139 0.2318283 0.02431747 0.0005180694 0.014000357 0.0004559166
## 31 31 0.03166329 0.2322137 0.02431027 0.0005132996 0.013676215 0.0004626459
## 32 32 0.03166919 0.2319527 0.02430794 0.0005169382 0.013786337 0.0004785733
## 33 33 0.03167906 0.2315439 0.02432932 0.0005083230 0.013660826 0.0004733039
## 34 34 0.03169819 0.2306617 0.02434201 0.0004998325 0.013093080 0.0004686308
## 35 35 0.03169059 0.2310494 0.02434699 0.0004873143 0.012510155 0.0004600941
## 36 36 0.03169724 0.2307368 0.02435723 0.0004889090 0.012112317 0.0004543717
## 37 37 0.03169836 0.2307055 0.02435164 0.0004881304 0.012222820 0.0004549147
## 38 38 0.03170701 0.2303239 0.02434525 0.0004826430 0.012107359 0.0004486601
## 39 39 0.03169713 0.2307845 0.02433937 0.0004734111 0.012073691 0.0004403999
## 40 40 0.03170445 0.2305046 0.02434130 0.0004802933 0.012976887 0.0004530974
## 41 41 0.03171804 0.2299362 0.02435503 0.0004762253 0.013242319 0.0004578494
## 42 42 0.03172346 0.2297130 0.02434993 0.0004665461 0.012915962 0.0004530804
## 43 43 0.03174078 0.2289451 0.02436506 0.0004730260 0.013232372 0.0004558813
## 44 44 0.03174482 0.2287799 0.02437021 0.0004732916 0.013178101 0.0004611665
## 45 45 0.03174868 0.2285862 0.02438842 0.0004777297 0.013159065 0.0004697285
## 46 46 0.03174688 0.2287022 0.02439127 0.0004793647 0.012840934 0.0004625124
## 47 47 0.03175316 0.2284049 0.02439737 0.0004640452 0.012103807 0.0004528851
## 48 48 0.03176368 0.2279236 0.02439849 0.0004589315 0.012014977 0.0004582795
## 49 49 0.03176996 0.2277003 0.02440908 0.0004568498 0.012260310 0.0004566862
## 50 50 0.03178485 0.2270363 0.02442208 0.0004557974 0.012121143 0.0004618342
## 51 51 0.03178912 0.2268701 0.02442496 0.0004558567 0.012502443 0.0004836610
## 52 52 0.03180075 0.2263582 0.02443753 0.0004443172 0.012189138 0.0004765622
## 53 53 0.03180107 0.2264033 0.02444120 0.0004458541 0.012599132 0.0004646160
## 54 54 0.03181601 0.2257795 0.02444503 0.0004382262 0.012295345 0.0004581232
## 55 55 0.03182743 0.2252800 0.02445874 0.0004284392 0.011993900 0.0004672329
## 56 56 0.03184044 0.2247309 0.02447221 0.0004234058 0.011982930 0.0004645769
## 57 57 0.03184231 0.2246848 0.02446962 0.0004148796 0.012061935 0.0004520298
## 58 58 0.03184898 0.2244676 0.02447490 0.0004305123 0.012197036 0.0004595675
## 59 59 0.03184637 0.2245903 0.02447680 0.0004314316 0.011941455 0.0004569903
## 60 60 0.03184862 0.2244886 0.02447851 0.0004392890 0.012062999 0.0004738218
## 61 61 0.03184808 0.2244986 0.02448584 0.0004334080 0.011653893 0.0004661748
## 62 62 0.03184275 0.2247465 0.02448505 0.0004297614 0.011593864 0.0004660400
## 63 63 0.03184642 0.2245886 0.02448860 0.0004206964 0.011282993 0.0004489997
## 64 64 0.03185206 0.2243990 0.02449903 0.0004151096 0.010800438 0.0004515277
## 65 65 0.03184504 0.2247504 0.02450284 0.0004101474 0.010674930 0.0004465208
## 66 66 0.03184003 0.2249645 0.02450241 0.0004141902 0.010629324 0.0004551475
## 67 67 0.03184474 0.2247614 0.02450912 0.0004105049 0.010334779 0.0004531789
## 68 68 0.03184320 0.2248845 0.02450213 0.0004097553 0.010633492 0.0004548779
## 69 69 0.03184325 0.2249105 0.02450059 0.0004015724 0.009860752 0.0004506357
## 70 70 0.03185002 0.2246638 0.02451160 0.0004106303 0.010552631 0.0004618237
## 71 71 0.03185925 0.2243061 0.02452364 0.0004281431 0.011136156 0.0004692411
## 72 72 0.03186173 0.2242277 0.02452908 0.0004260811 0.011133907 0.0004735031
## 73 73 0.03186302 0.2241835 0.02453338 0.0004332258 0.011408465 0.0004721704
## 74 74 0.03186173 0.2242815 0.02453075 0.0004353288 0.011639288 0.0004810758
## 75 75 0.03186368 0.2242154 0.02453565 0.0004375910 0.011822965 0.0004854816
## 76 76 0.03187224 0.2238857 0.02454084 0.0004321657 0.012028608 0.0004992030
## 77 77 0.03187776 0.2236727 0.02453723 0.0004461965 0.012504528 0.0005053290
## 78 78 0.03188651 0.2233100 0.02453757 0.0004411440 0.012342143 0.0005016296
## 79 79 0.03188519 0.2234114 0.02453472 0.0004392118 0.012380130 0.0005050875
## 80 80 0.03189384 0.2230227 0.02454394 0.0004269090 0.011425146 0.0004894759
## 81 81 0.03189687 0.2229541 0.02454650 0.0004164580 0.011354288 0.0004806964
## 82 82 0.03190291 0.2227316 0.02455293 0.0004071958 0.011227919 0.0004705537
## 83 83 0.03190670 0.2225797 0.02455234 0.0004147241 0.011074671 0.0004694983
## 84 84 0.03190412 0.2226995 0.02455625 0.0004246055 0.011352780 0.0004791558
## 85 85 0.03190585 0.2226805 0.02455579 0.0004314023 0.011493339 0.0004752353
## 86 86 0.03190362 0.2228024 0.02455762 0.0004398723 0.011900699 0.0004887547
## 87 87 0.03190246 0.2228834 0.02454726 0.0004401845 0.012167369 0.0004840153
## 88 88 0.03190834 0.2226535 0.02455286 0.0004424973 0.012167607 0.0004864108
## 89 89 0.03190958 0.2226415 0.02455382 0.0004309769 0.012175568 0.0004907133
## 90 90 0.03191165 0.2226075 0.02455880 0.0004306420 0.012121763 0.0004952165
## 91 91 0.03191290 0.2225623 0.02456178 0.0004329858 0.012493786 0.0004989735
## 92 92 0.03191412 0.2225763 0.02456270 0.0004356065 0.012870201 0.0004986899
## 93 93 0.03191748 0.2224430 0.02456462 0.0004300769 0.012532903 0.0005025477
## 94 94 0.03192939 0.2219120 0.02457202 0.0004265547 0.012351269 0.0004978305
## 95 95 0.03192990 0.2218966 0.02457615 0.0004224328 0.011973993 0.0004932663
## 96 96 0.03193342 0.2217635 0.02457474 0.0004156698 0.011869585 0.0004870485
## 97 97 0.03194228 0.2214032 0.02458178 0.0004138702 0.012039692 0.0004901824
## 98 98 0.03195346 0.2209251 0.02459518 0.0004135423 0.011746320 0.0004792506
## 99 99 0.03195579 0.2208353 0.02459549 0.0004061547 0.011331129 0.0004683806
## 100 100 0.03195438 0.2209110 0.02458744 0.0004118273 0.011343631 0.0004735021
## 101 101 0.03196000 0.2207051 0.02458963 0.0004190202 0.011645539 0.0004719627
## 102 102 0.03195697 0.2208753 0.02458634 0.0004252468 0.011847687 0.0004761344
## 103 103 0.03194442 0.2214508 0.02456947 0.0004227707 0.011796382 0.0004760802
## 104 104 0.03195106 0.2211792 0.02457691 0.0004248272 0.012146738 0.0004817882
## 105 105 0.03196185 0.2207095 0.02458832 0.0004275342 0.012389658 0.0004904059
## 106 106 0.03196446 0.2205654 0.02459516 0.0004242725 0.012276635 0.0004901820
## 107 107 0.03196092 0.2207176 0.02459412 0.0004194459 0.012155982 0.0004821886
## 108 108 0.03195831 0.2208609 0.02459264 0.0004161540 0.012170004 0.0004829870
## 109 109 0.03195416 0.2210525 0.02458977 0.0004138003 0.012084950 0.0004797847
## 110 110 0.03195697 0.2209787 0.02458987 0.0004252389 0.012383418 0.0004763850
## 111 111 0.03196019 0.2208610 0.02459297 0.0004315620 0.012644804 0.0004825839
## 112 112 0.03195633 0.2210310 0.02458514 0.0004233481 0.012403514 0.0004806036
## 113 113 0.03196217 0.2207826 0.02458910 0.0004313584 0.012772986 0.0004847206
## 114 114 0.03196858 0.2205216 0.02459164 0.0004325225 0.012539516 0.0004875631
## 115 115 0.03197533 0.2202164 0.02459588 0.0004357065 0.012811735 0.0004902173
## 116 116 0.03197844 0.2200951 0.02459527 0.0004368546 0.012701220 0.0004878189
## 117 117 0.03198276 0.2198998 0.02460159 0.0004336826 0.012810230 0.0004868778
## 118 118 0.03198538 0.2197979 0.02460486 0.0004314110 0.012681344 0.0004870856
## 119 119 0.03199253 0.2194572 0.02460905 0.0004228555 0.012433358 0.0004790487
## 120 120 0.03198876 0.2196402 0.02460923 0.0004080884 0.012071264 0.0004725043
## 121 121 0.03199067 0.2196169 0.02460555 0.0004183646 0.012461511 0.0004849034
## 122 122 0.03199256 0.2195535 0.02460901 0.0004096506 0.012275160 0.0004841670
## 123 123 0.03200036 0.2192308 0.02461724 0.0004085294 0.012313870 0.0004848895
## 124 124 0.03200274 0.2191323 0.02462434 0.0004046559 0.012354054 0.0004831596
## 125 125 0.03199497 0.2194581 0.02462000 0.0003998163 0.012418233 0.0004808978
## 126 126 0.03199780 0.2193444 0.02461813 0.0003964520 0.012273548 0.0004740772
## 127 127 0.03199269 0.2195339 0.02461388 0.0003936131 0.012290632 0.0004668168
## 128 128 0.03199280 0.2195521 0.02461206 0.0003923724 0.012195472 0.0004680727
## 129 129 0.03199203 0.2196104 0.02461471 0.0004033317 0.012513040 0.0004710972
## 130 130 0.03200065 0.2192557 0.02462389 0.0004072913 0.012403967 0.0004740143
## 131 131 0.03200443 0.2190999 0.02462964 0.0004075810 0.012340590 0.0004733671
## 132 132 0.03200227 0.2191785 0.02462820 0.0004068304 0.012291402 0.0004731945
## 133 133 0.03199818 0.2193747 0.02462206 0.0004139843 0.012563080 0.0004766458
## 134 134 0.03200450 0.2190682 0.02462573 0.0004109641 0.012397671 0.0004704266
## 135 135 0.03200498 0.2190468 0.02462399 0.0004131926 0.012445676 0.0004761510
## 136 136 0.03200613 0.2190004 0.02462901 0.0004123680 0.012371961 0.0004748404
## 137 137 0.03200727 0.2189843 0.02462760 0.0004062887 0.012319205 0.0004736377
## 138 138 0.03201357 0.2187315 0.02463054 0.0004051912 0.012255584 0.0004717075
## 139 139 0.03201600 0.2186381 0.02463695 0.0004044712 0.012292443 0.0004773258
## 140 140 0.03201297 0.2187899 0.02463176 0.0004011368 0.012064689 0.0004723799
## 141 141 0.03201907 0.2185438 0.02463619 0.0004028372 0.011972054 0.0004739629
## 142 142 0.03201715 0.2186248 0.02463476 0.0004088710 0.012059851 0.0004799777
## 143 143 0.03201339 0.2187845 0.02463014 0.0004117787 0.012187322 0.0004824251
## 144 144 0.03201551 0.2186942 0.02463646 0.0004109910 0.012047056 0.0004787090
## 145 145 0.03201603 0.2186577 0.02463584 0.0004096517 0.012025420 0.0004745081
## 146 146 0.03201821 0.2185938 0.02464020 0.0004069249 0.012206702 0.0004789309
## 147 147 0.03202144 0.2184536 0.02464096 0.0004057848 0.012047906 0.0004738304
## 148 148 0.03202491 0.2182967 0.02464485 0.0004054507 0.012062392 0.0004733294
## 149 149 0.03202402 0.2183408 0.02464673 0.0004089829 0.012245345 0.0004796601
## 150 150 0.03202732 0.2182203 0.02464851 0.0004070482 0.012055773 0.0004768571
## 151 151 0.03202779 0.2181845 0.02464807 0.0004073058 0.011880055 0.0004777812
## 152 152 0.03202821 0.2181697 0.02465030 0.0004132279 0.012019290 0.0004778740
## 153 153 0.03202637 0.2182248 0.02464859 0.0004172483 0.012227081 0.0004867691
## 154 154 0.03202790 0.2181492 0.02464699 0.0004163870 0.012208333 0.0004863784
## 155 155 0.03202641 0.2182421 0.02464405 0.0004167966 0.012245291 0.0004897295
## 156 156 0.03202686 0.2182345 0.02464215 0.0004185581 0.012541466 0.0004911561
## 157 157 0.03202905 0.2181788 0.02464238 0.0004157252 0.012488900 0.0004842236
## 158 158 0.03203252 0.2180551 0.02464779 0.0004143472 0.012572315 0.0004851166
## 159 159 0.03203134 0.2181167 0.02464745 0.0004172041 0.012792942 0.0004859556
## 160 160 0.03202763 0.2182869 0.02464312 0.0004135173 0.012667205 0.0004841002
## 161 161 0.03202450 0.2184274 0.02464239 0.0004126504 0.012633972 0.0004798493
## 162 162 0.03202719 0.2183052 0.02464349 0.0004093425 0.012627506 0.0004804770
## 163 163 0.03202445 0.2184364 0.02464332 0.0004096586 0.012604900 0.0004804734
## 164 164 0.03202437 0.2184428 0.02464386 0.0004085438 0.012365089 0.0004793390
## 165 165 0.03202828 0.2182772 0.02464736 0.0004110513 0.012526845 0.0004810184
## 166 166 0.03202354 0.2185012 0.02464314 0.0004077717 0.012468839 0.0004788002
## 167 167 0.03202818 0.2183062 0.02464770 0.0004085363 0.012493263 0.0004815540
## 168 168 0.03202888 0.2182897 0.02465056 0.0004031690 0.012341309 0.0004770874
## 169 169 0.03202365 0.2185276 0.02464662 0.0004031298 0.012303635 0.0004790617
## 170 170 0.03202491 0.2184820 0.02464768 0.0004005603 0.012228929 0.0004785356
## 171 171 0.03202644 0.2184212 0.02464687 0.0004036579 0.012358045 0.0004777246
## 172 172 0.03203145 0.2182125 0.02464847 0.0004054542 0.012390482 0.0004790211
## 173 173 0.03202864 0.2183220 0.02464694 0.0004022013 0.012148425 0.0004775696
## 174 174 0.03203092 0.2182370 0.02464931 0.0004002696 0.012297834 0.0004785455
## 175 175 0.03202991 0.2182865 0.02465221 0.0004025953 0.012458492 0.0004796867
## 176 176 0.03202746 0.2184136 0.02464741 0.0004038045 0.012557819 0.0004833052
## 177 177 0.03203040 0.2183010 0.02464802 0.0004028177 0.012645659 0.0004848794
## 178 178 0.03202830 0.2184073 0.02464566 0.0004077398 0.012976182 0.0004882167
## 179 179 0.03203323 0.2181996 0.02464888 0.0004100925 0.013149481 0.0004909597
## 180 180 0.03203315 0.2182007 0.02464897 0.0004097242 0.013291769 0.0004912296
## 181 181 0.03203213 0.2182650 0.02464952 0.0004123080 0.013392097 0.0004920860
## 182 182 0.03203121 0.2182962 0.02464868 0.0004117637 0.013337862 0.0004935363
## 183 183 0.03202895 0.2184012 0.02464934 0.0004100496 0.013401071 0.0004927421
## 184 184 0.03202838 0.2184397 0.02465057 0.0004092269 0.013369048 0.0004914942
## 185 185 0.03203385 0.2182160 0.02465393 0.0004089608 0.013461364 0.0004893586
## 186 186 0.03203614 0.2181238 0.02465582 0.0004121234 0.013631914 0.0004917008
## 187 187 0.03203786 0.2180543 0.02465835 0.0004112577 0.013618136 0.0004915585
## 188 188 0.03203706 0.2180838 0.02465632 0.0004117941 0.013673513 0.0004915952
## 189 189 0.03203760 0.2180611 0.02465615 0.0004130466 0.013699703 0.0004910120
## 190 190 0.03203936 0.2179814 0.02465749 0.0004126056 0.013723363 0.0004903128
## 191 191 0.03204019 0.2179501 0.02466026 0.0004121301 0.013718301 0.0004900573
## 192 192 0.03204048 0.2179431 0.02466100 0.0004125569 0.013703403 0.0004889929
## 193 193 0.03204290 0.2178401 0.02466289 0.0004123712 0.013752741 0.0004910058
## 194 194 0.03204183 0.2178880 0.02466137 0.0004115415 0.013707036 0.0004901747
## 195 195 0.03203998 0.2179783 0.02465972 0.0004105472 0.013748916 0.0004911692
## 196 196 0.03203893 0.2180216 0.02466096 0.0004090095 0.013716180 0.0004887340
## 197 197 0.03203945 0.2180111 0.02466239 0.0004054981 0.013649128 0.0004875895
## 198 198 0.03204240 0.2178758 0.02466469 0.0004031775 0.013561815 0.0004856890
## 199 199 0.03204302 0.2178561 0.02466503 0.0004064311 0.013707305 0.0004870789
## 200 200 0.03204355 0.2178312 0.02466516 0.0004073092 0.013730107 0.0004879541
## 201 201 0.03204275 0.2178699 0.02466391 0.0004093955 0.013718745 0.0004900377
## 202 202 0.03204285 0.2178804 0.02466395 0.0004129338 0.013927219 0.0004931444
## 203 203 0.03204446 0.2178224 0.02466613 0.0004143265 0.013997608 0.0004947087
## 204 204 0.03204379 0.2178626 0.02466640 0.0004161424 0.014057517 0.0004950381
## 205 205 0.03204187 0.2179528 0.02466350 0.0004156100 0.014099973 0.0004946321
## 206 206 0.03204384 0.2178641 0.02466501 0.0004165036 0.014146625 0.0004947564
## 207 207 0.03204525 0.2177992 0.02466614 0.0004137824 0.013959729 0.0004917704
## 208 208 0.03204401 0.2178457 0.02466657 0.0004116662 0.013920357 0.0004912926
## 209 209 0.03204413 0.2178482 0.02466672 0.0004118464 0.013976820 0.0004899244
## 210 210 0.03204294 0.2178979 0.02466608 0.0004125073 0.014006849 0.0004909692
## 211 211 0.03204338 0.2178814 0.02466572 0.0004139246 0.014107343 0.0004932349
## 212 212 0.03204242 0.2179214 0.02466502 0.0004137913 0.014108414 0.0004919281
## 213 213 0.03204114 0.2179733 0.02466458 0.0004153016 0.014125523 0.0004920673
## 214 214 0.03204192 0.2179498 0.02466390 0.0004168656 0.014223170 0.0004931778
## 215 215 0.03204168 0.2179653 0.02466405 0.0004171961 0.014252707 0.0004931760
## 216 216 0.03204231 0.2179339 0.02466461 0.0004171828 0.014294998 0.0004937500
## 217 217 0.03204391 0.2178659 0.02466582 0.0004169446 0.014272795 0.0004940045
## 218 218 0.03204341 0.2178861 0.02466584 0.0004180990 0.014314121 0.0004944950
## 219 219 0.03204414 0.2178556 0.02466685 0.0004176249 0.014340552 0.0004947478
## 220 220 0.03204468 0.2178355 0.02466744 0.0004178702 0.014341123 0.0004946425
## 221 221 0.03204550 0.2177955 0.02466864 0.0004176923 0.014334627 0.0004942254
## 222 222 0.03204632 0.2177630 0.02466953 0.0004176042 0.014331596 0.0004935587
## 223 223 0.03204660 0.2177524 0.02466984 0.0004165990 0.014298300 0.0004934110
## 224 224 0.03204693 0.2177398 0.02466967 0.0004169454 0.014325805 0.0004942027
## 225 225 0.03204576 0.2177908 0.02466855 0.0004179333 0.014356347 0.0004946893
## 226 226 0.03204662 0.2177579 0.02466911 0.0004179622 0.014377432 0.0004941819
## 227 227 0.03204675 0.2177498 0.02466934 0.0004178367 0.014353149 0.0004932246
## 228 228 0.03204672 0.2177517 0.02466905 0.0004173251 0.014337451 0.0004927076
## 229 229 0.03204632 0.2177666 0.02466890 0.0004174308 0.014333460 0.0004934692
## 230 230 0.03204622 0.2177733 0.02466865 0.0004174209 0.014344627 0.0004934204
## 231 231 0.03204718 0.2177334 0.02466928 0.0004175185 0.014376226 0.0004934526
## 232 232 0.03204717 0.2177352 0.02466906 0.0004179389 0.014398808 0.0004939515
## 233 233 0.03204684 0.2177497 0.02466872 0.0004174698 0.014379015 0.0004935217
## 234 234 0.03204684 0.2177513 0.02466836 0.0004177045 0.014393585 0.0004936237
## 235 235 0.03204685 0.2177523 0.02466836 0.0004174856 0.014388373 0.0004937148
## 236 236 0.03204686 0.2177515 0.02466831 0.0004174937 0.014385940 0.0004936900
## 237 237 0.03204697 0.2177468 0.02466842 0.0004172423 0.014380789 0.0004934998
## 238 238 0.03204699 0.2177449 0.02466846 0.0004171936 0.014376422 0.0004936864
## 239 239 0.03204712 0.2177394 0.02466853 0.0004172112 0.014373434 0.0004935919
## 240 240 0.03204715 0.2177384 0.02466849 0.0004172579 0.014375919 0.0004936210
## [1] "Best Model"
## nvmax
## 12 12
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## [1] "Coefficients of final model:"
## Estimate 2.5 % 97.5 %
## (Intercept) 1.997103e+00 1.9906386244 2.003568e+00
## x4 -5.427583e-05 -0.0000716205 -3.693116e-05
## x7 1.122390e-02 0.0099972365 1.245057e-02
## x9 3.330616e-03 0.0026964717 3.964760e-03
## x10 9.708542e-04 0.0003799704 1.561738e-03
## x16 8.621833e-04 0.0004505316 1.273835e-03
## x17 1.587577e-03 0.0009649579 2.210196e-03
## stat14 -8.668805e-04 -0.0013371562 -3.966047e-04
## stat98 3.548025e-03 0.0030790242 4.017027e-03
## stat100 8.029460e-04 0.0003257120 1.280180e-03
## stat106 -8.063776e-04 -0.0012800279 -3.327274e-04
## stat110 -3.375726e-03 -0.0038506405 -2.900812e-03
## x18.sqrt 2.705501e-02 0.0252462261 2.886379e-02
# Evaluate the backward-elimination (leapBackward) model on the held-out test set.
# isTRUE() is safer than `== TRUE` (no NA propagation into `if`).
# First two arguments are now named, consistent with the forward-selection call.
if (isTRUE(algo.backward.caret)) {
  test.model(model = model.backward, test = data.test
             ,method = 'leapBackward', subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,id = id
             # NOTE(review): `t` is presumably a transformation object defined
             # earlier in the report (not base::t) — confirm upstream chunk.
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 2.040 2.085 2.098 2.098 2.111 2.149
## [1] "leapBackward Test MSE: 0.00105272538908082"
# Train the stepwise-selection (leapSeq) model via the project helper
# train.caret.glmselect; same seed as the other methods so CV folds match.
# isTRUE() avoids an `if` error should the flag be NA/missing (`== TRUE` yields NA).
if (isTRUE(algo.stepwise.caret)) {
  set.seed(1)
  returned = train.caret.glmselect(formula = formula
                                   ,data = data.train
                                   ,method = "leapSeq"
                                   ,feature.names = feature.names)
  model.stepwise = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 13 on full training set
## [1] "All models results"
## nvmax RMSE Rsquared MAE RMSESD RsquaredSD MAESD
## 1 1 0.03395310 0.1165510 0.02647398 0.0005979859 0.02327287 0.0005150109
## 2 2 0.03311925 0.1591907 0.02574789 0.0006352398 0.02370837 0.0005936152
## 3 3 0.03252663 0.1890502 0.02520706 0.0006234383 0.02367460 0.0005685038
## 4 4 0.03200521 0.2149257 0.02451286 0.0006236591 0.02426557 0.0006086824
## 5 5 0.03172206 0.2288497 0.02431828 0.0005121533 0.01962112 0.0005318708
## 6 6 0.03162055 0.2337592 0.02423701 0.0005501750 0.01958096 0.0005801727
## 7 7 0.03155536 0.2368763 0.02420893 0.0005463424 0.01815846 0.0005281206
## 8 8 0.03155004 0.2371337 0.02421475 0.0005328551 0.01824169 0.0005201855
## 9 9 0.03151227 0.2390118 0.02418230 0.0005701987 0.01953932 0.0005480818
## 10 10 0.03151991 0.2386407 0.02418932 0.0005934613 0.02050606 0.0005517946
## 11 11 0.03148801 0.2401440 0.02417114 0.0005839110 0.02025669 0.0005399899
## 12 12 0.03184873 0.2220054 0.02449398 0.0014731731 0.06337446 0.0012821381
## 13 13 0.03144593 0.2421064 0.02412569 0.0005595793 0.01829170 0.0004961523
## 14 14 0.03147217 0.2408524 0.02413358 0.0005546584 0.01777328 0.0004757058
## 15 15 0.03146269 0.2412932 0.02412038 0.0005699604 0.01794180 0.0004964014
## 16 16 0.03146471 0.2412243 0.02413481 0.0005822647 0.01826623 0.0005096920
## 17 17 0.03148108 0.2404943 0.02413768 0.0005893673 0.01807734 0.0005078095
## 18 18 0.03147515 0.2408168 0.02413956 0.0006073607 0.01890258 0.0005080548
## 19 19 0.03149985 0.2396299 0.02416492 0.0005785946 0.01721525 0.0004851554
## 20 20 0.03152209 0.2386842 0.02418103 0.0005837738 0.01742569 0.0005165697
## 21 21 0.03212847 0.2080393 0.02469102 0.0014780168 0.06889381 0.0012032833
## 22 22 0.03155952 0.2369302 0.02421275 0.0005660854 0.01630714 0.0005193433
## 23 23 0.03156639 0.2365991 0.02421827 0.0005658361 0.01662496 0.0005131652
## 24 24 0.03158130 0.2359051 0.02423987 0.0005243793 0.01471373 0.0004765692
## 25 25 0.03194440 0.2175277 0.02456111 0.0012270693 0.05726959 0.0009206491
## 26 26 0.03161106 0.2345785 0.02427067 0.0005133071 0.01432394 0.0004680176
## 27 27 0.03161533 0.2343802 0.02426568 0.0005152906 0.01400600 0.0004643490
## 28 28 0.03205550 0.2124290 0.02461237 0.0012667498 0.06343515 0.0011264580
## 29 29 0.03267559 0.1806967 0.02514155 0.0014046343 0.07872716 0.0012850298
## 30 30 0.03237678 0.1965721 0.02495738 0.0017603872 0.07618532 0.0015112509
## 31 31 0.03166738 0.2320288 0.02431130 0.0005180245 0.01384250 0.0004630981
## 32 32 0.03236741 0.1966796 0.02490932 0.0016820244 0.07709901 0.0014919869
## 33 33 0.03200153 0.2150236 0.02458434 0.0012600108 0.06049046 0.0011383821
## 34 34 0.03204919 0.2131457 0.02467205 0.0013574399 0.05874105 0.0011889504
## 35 35 0.03202732 0.2137673 0.02464587 0.0010704532 0.05153374 0.0009548908
## 36 36 0.03235856 0.1971477 0.02493137 0.0014006177 0.06591519 0.0011795833
## 37 37 0.03200264 0.2149487 0.02461549 0.0008589623 0.04560037 0.0007536462
## 38 38 0.03170571 0.2304187 0.02434726 0.0004796983 0.01239036 0.0004552843
## 39 39 0.03201673 0.2145932 0.02458537 0.0012207169 0.05401021 0.0009014739
## 40 40 0.03236383 0.1965836 0.02491781 0.0014806049 0.07387941 0.0013467407
## 41 41 0.03172084 0.2297956 0.02436001 0.0004737923 0.01323157 0.0004641150
## 42 42 0.03211064 0.2101020 0.02467107 0.0011990611 0.06091835 0.0010819174
## 43 43 0.03238499 0.1958848 0.02492895 0.0012744333 0.06546844 0.0009891890
## 44 44 0.03207152 0.2123433 0.02461499 0.0012140263 0.05388389 0.0008993639
## 45 45 0.03198884 0.2162901 0.02457373 0.0009655194 0.04283357 0.0009087003
## 46 46 0.03208268 0.2118722 0.02464856 0.0012068023 0.05311693 0.0008942893
## 47 47 0.03206137 0.2125923 0.02465802 0.0008222183 0.04348369 0.0007429729
## 48 48 0.03179386 0.2266408 0.02442218 0.0004686127 0.01232010 0.0004729123
## 49 49 0.03201119 0.2153167 0.02459122 0.0009604959 0.04252321 0.0008883017
## 50 50 0.03271132 0.1797871 0.02519713 0.0015436664 0.07529546 0.0011137124
## 51 51 0.03240974 0.1950612 0.02495802 0.0012758090 0.06519868 0.0010009087
## 52 52 0.03326919 0.1512376 0.02566391 0.0017404135 0.08206788 0.0014626592
## 53 53 0.03213600 0.2094114 0.02472960 0.0011890261 0.05594500 0.0008997786
## 54 54 0.03207164 0.2122795 0.02465893 0.0008142254 0.04389233 0.0007620422
## 55 55 0.03216158 0.2086342 0.02475174 0.0013338822 0.05814626 0.0011477095
## 56 56 0.03256895 0.1879242 0.02509066 0.0014735476 0.07473651 0.0012108743
## 57 57 0.03238430 0.1965166 0.02491721 0.0012522536 0.06179489 0.0009728607
## 58 58 0.03233331 0.1989445 0.02485335 0.0010981538 0.05647196 0.0010373465
## 59 59 0.03217643 0.2076546 0.02476996 0.0010279961 0.05078756 0.0009474076
## 60 60 0.03248055 0.1919149 0.02500799 0.0012248322 0.06714022 0.0011294005
## 61 61 0.03213967 0.2096055 0.02471601 0.0011322728 0.05029136 0.0008272489
## 62 62 0.03241068 0.1953921 0.02495635 0.0012779057 0.06269239 0.0011904630
## 63 63 0.03221333 0.2064732 0.02478715 0.0013116779 0.05593660 0.0010993967
## 64 64 0.03252520 0.1904833 0.02504122 0.0016509282 0.07573418 0.0014307331
## 65 65 0.03280750 0.1752916 0.02529316 0.0014175111 0.07896723 0.0011483550
## 66 66 0.03279472 0.1761027 0.02528510 0.0014825821 0.07654878 0.0013708636
## 67 67 0.03218170 0.2080634 0.02477862 0.0013064847 0.05693989 0.0010989741
## 68 68 0.03184739 0.2246896 0.02450078 0.0004059793 0.01033402 0.0004536262
## 69 69 0.03184444 0.2249007 0.02449440 0.0004022274 0.00993389 0.0004520839
## 70 70 0.03245817 0.1938622 0.02498498 0.0015137014 0.06709098 0.0013019496
## 71 71 0.03185965 0.2242984 0.02452095 0.0004316430 0.01120973 0.0004729938
## 72 72 0.03320193 0.1560513 0.02562994 0.0017367475 0.08641435 0.0015335373
## 73 73 0.03245604 0.1932452 0.02503014 0.0011472444 0.06165187 0.0010869103
## 74 74 0.03277045 0.1776035 0.02528658 0.0015505375 0.07598313 0.0013033162
## 75 75 0.03186375 0.2242037 0.02453723 0.0004377491 0.01184014 0.0004839962
## 76 76 0.03187327 0.2237891 0.02454330 0.0004217406 0.01150808 0.0004820495
## 77 77 0.03227235 0.2038147 0.02487504 0.0011317406 0.05835914 0.0010259302
## 78 78 0.03245754 0.1936742 0.02499352 0.0012599256 0.06235790 0.0011698368
## 79 79 0.03244581 0.1942184 0.02498753 0.0011355282 0.06029775 0.0008510651
## 80 80 0.03252890 0.1907121 0.02505595 0.0015330764 0.07239424 0.0012287182
## 81 81 0.03215542 0.2090305 0.02475367 0.0007993302 0.04367981 0.0007691687
## 82 82 0.03191309 0.2222477 0.02455649 0.0004297897 0.01128211 0.0004957637
## 83 83 0.03322277 0.1549856 0.02567907 0.0015168273 0.08148349 0.0012785349
## 84 84 0.03247424 0.1928723 0.02502002 0.0010168915 0.05991675 0.0009635334
## 85 85 0.03223581 0.2059809 0.02481420 0.0012068344 0.05830730 0.0011006013
## 86 86 0.03191537 0.2222918 0.02456698 0.0004223281 0.01138868 0.0004749841
## 87 87 0.03222945 0.2064340 0.02481380 0.0010844437 0.04823423 0.0007731673
## 88 88 0.03214102 0.2099637 0.02473107 0.0006374666 0.04222347 0.0006431109
## 89 89 0.03190718 0.2227566 0.02455448 0.0004189663 0.01194565 0.0004869470
## 90 90 0.03263940 0.1863473 0.02516789 0.0016037247 0.07692826 0.0013544024
## 91 91 0.03310502 0.1610550 0.02550354 0.0016924729 0.08267717 0.0013960702
## 92 92 0.03247089 0.1933711 0.02498402 0.0012381253 0.06803826 0.0011482428
## 93 93 0.03191375 0.2225811 0.02455590 0.0004305750 0.01248926 0.0005023323
## 94 94 0.03259148 0.1888720 0.02508162 0.0016827263 0.07778648 0.0014214669
## 95 95 0.03277902 0.1780719 0.02524801 0.0015908873 0.07469680 0.0011967627
## 96 96 0.03255532 0.1895759 0.02508296 0.0014175258 0.06716063 0.0011976467
## 97 97 0.03218165 0.2079301 0.02478160 0.0008291764 0.04599857 0.0007985118
## 98 98 0.03256169 0.1897412 0.02509688 0.0013438655 0.06856523 0.0012390432
## 99 99 0.03219236 0.2074805 0.02478691 0.0008271381 0.04581863 0.0007909908
## 100 100 0.03229162 0.2039826 0.02485508 0.0012174547 0.05857352 0.0011176308
## 101 101 0.03224704 0.2059389 0.02481858 0.0010787725 0.04789992 0.0007749846
## 102 102 0.03292458 0.1710560 0.02538603 0.0016270469 0.08209386 0.0013853270
## 103 103 0.03243904 0.1939688 0.02496610 0.0009348666 0.06062438 0.0008930439
## 104 104 0.03224601 0.2060828 0.02481624 0.0010703844 0.04751235 0.0007730577
## 105 105 0.03229212 0.2040271 0.02484721 0.0012251169 0.05886084 0.0011124415
## 106 106 0.03196891 0.2203750 0.02459769 0.0004269987 0.01227854 0.0004882142
## 107 107 0.03196630 0.2204764 0.02459876 0.0004213159 0.01204677 0.0004782271
## 108 108 0.03220721 0.2071252 0.02477733 0.0006761368 0.04531076 0.0006752679
## 109 109 0.03223640 0.2065201 0.02481654 0.0010624755 0.04721262 0.0007676363
## 110 110 0.03196232 0.2207343 0.02459258 0.0004347697 0.01260227 0.0004922456
## 111 111 0.03195893 0.2209103 0.02458482 0.0004380865 0.01295149 0.0004960261
## 112 112 0.03267515 0.1849861 0.02517660 0.0015063488 0.07671866 0.0013660837
## 113 113 0.03264745 0.1869476 0.02512542 0.0017009324 0.07374746 0.0013641511
## 114 114 0.03196234 0.2207785 0.02458635 0.0004482535 0.01310261 0.0004922265
## 115 115 0.03237201 0.2004928 0.02493519 0.0011413515 0.05857853 0.0010445074
## 116 116 0.03230169 0.2035424 0.02488784 0.0009596714 0.04779503 0.0009539327
## 117 117 0.03273051 0.1826874 0.02525163 0.0014951587 0.07515489 0.0012241282
## 118 118 0.03249319 0.1921215 0.02499372 0.0012217334 0.06425470 0.0011315586
## 119 119 0.03255599 0.1900934 0.02508354 0.0012342067 0.06159680 0.0011940700
## 120 120 0.03225352 0.2061942 0.02486096 0.0007682178 0.03758119 0.0008194699
## 121 121 0.03198620 0.2197883 0.02460674 0.0004212629 0.01249100 0.0004797151
## 122 122 0.03276812 0.1801435 0.02525104 0.0014914806 0.06793973 0.0013902351
## 123 123 0.03231789 0.2032845 0.02490064 0.0009184660 0.04764970 0.0008805514
## 124 124 0.03199751 0.2193295 0.02461767 0.0004102736 0.01272622 0.0004768962
## 125 125 0.03276891 0.1794521 0.02528211 0.0013159271 0.06683361 0.0011811383
## 126 126 0.03247421 0.1942025 0.02505483 0.0010913231 0.05604820 0.0009494220
## 127 127 0.03252281 0.1928671 0.02509021 0.0012267879 0.05592004 0.0008997782
## 128 128 0.03224831 0.2066292 0.02483289 0.0009350485 0.04049978 0.0006907369
## 129 129 0.03224155 0.2069791 0.02482782 0.0009429837 0.04066350 0.0007005084
## 130 130 0.03273405 0.1810483 0.02521850 0.0012950031 0.06566147 0.0011736349
## 131 131 0.03222808 0.2073675 0.02480923 0.0009522291 0.04595325 0.0009168453
## 132 132 0.03199819 0.2193532 0.02462037 0.0004070511 0.01234632 0.0004653673
## 133 133 0.03262434 0.1877941 0.02517276 0.0012912991 0.05843775 0.0011193690
## 134 134 0.03260221 0.1887039 0.02513413 0.0008730932 0.04519673 0.0008835705
## 135 135 0.03233558 0.2012657 0.02493517 0.0008137642 0.04073683 0.0007277000
## 136 136 0.03247469 0.1956266 0.02504258 0.0009172432 0.04623991 0.0007731454
## 137 137 0.03227808 0.2059560 0.02481401 0.0009977424 0.03997496 0.0007595931
## 138 138 0.03223041 0.2081033 0.02480189 0.0009659676 0.03973886 0.0008254731
## 139 139 0.03234726 0.1998533 0.02491803 0.0006629900 0.04495050 0.0007529342
## 140 140 0.03200834 0.2189611 0.02462512 0.0004041163 0.01232630 0.0004789983
## 141 141 0.03223250 0.2080385 0.02480428 0.0009638559 0.03964141 0.0008291287
## 142 142 0.03228188 0.2054740 0.02485515 0.0007515058 0.03836047 0.0007044267
## 143 143 0.03202079 0.2184392 0.02463876 0.0004110455 0.01221884 0.0004820880
## 144 144 0.03221822 0.2085239 0.02481748 0.0005700493 0.02577203 0.0006145762
## 145 145 0.03223042 0.2076941 0.02483141 0.0008642412 0.04061057 0.0009064903
## 146 146 0.03228806 0.2056105 0.02482485 0.0009881780 0.03938999 0.0007502610
## 147 147 0.03274661 0.1827110 0.02521779 0.0010689843 0.04958638 0.0008460955
## 148 148 0.03243204 0.1967947 0.02499267 0.0008205200 0.04569460 0.0007838225
## 149 149 0.03221866 0.2085268 0.02481981 0.0005708683 0.02609224 0.0006109911
## 150 150 0.03201957 0.2185470 0.02463834 0.0004090180 0.01228857 0.0004837501
## 151 151 0.03202558 0.2182892 0.02464091 0.0004181429 0.01251580 0.0004928991
## 152 152 0.03270294 0.1845964 0.02521235 0.0012731242 0.05643624 0.0010633921
## 153 153 0.03278722 0.1812390 0.02523986 0.0013479444 0.05845112 0.0010498450
## 154 154 0.03202504 0.2182697 0.02464628 0.0004205871 0.01242485 0.0004929492
## 155 155 0.03265302 0.1867438 0.02522298 0.0010074939 0.04518490 0.0007528754
## 156 156 0.03222771 0.2081217 0.02482250 0.0005874540 0.02680212 0.0006221371
## 157 157 0.03270870 0.1843103 0.02516930 0.0011813835 0.05377377 0.0010168031
## 158 158 0.03229328 0.2054845 0.02482781 0.0009847519 0.03914147 0.0007610153
## 159 159 0.03202792 0.2182713 0.02464466 0.0004151162 0.01264620 0.0004848251
## 160 160 0.03238812 0.1987845 0.02496943 0.0009597893 0.04904357 0.0009947980
## 161 161 0.03227665 0.2064639 0.02484106 0.0010781589 0.04409007 0.0008980820
## 162 162 0.03216183 0.2102300 0.02477236 0.0005967733 0.03172823 0.0006526770
## 163 163 0.03223488 0.2070792 0.02482161 0.0005894582 0.03877708 0.0006903999
## 164 164 0.03238617 0.1998705 0.02494804 0.0008043542 0.03787564 0.0008456527
## 165 165 0.03216329 0.2101910 0.02477800 0.0005934308 0.03134807 0.0006527415
## 166 166 0.03249855 0.1947910 0.02503398 0.0011355635 0.05551213 0.0010123791
## 167 167 0.03244088 0.1973219 0.02499503 0.0009368753 0.04681932 0.0009376143
## 168 168 0.03228589 0.2055065 0.02487336 0.0007608238 0.03890123 0.0007262421
## 169 169 0.03230165 0.2052744 0.02484687 0.0010006253 0.03993630 0.0007826227
## 170 170 0.03224055 0.2074991 0.02484127 0.0008665019 0.04084390 0.0009097814
## 171 171 0.03240835 0.1989010 0.02496936 0.0008324399 0.03954443 0.0008611168
## 172 172 0.03203058 0.2182537 0.02464912 0.0004053435 0.01247713 0.0004797818
## 173 173 0.03252211 0.1934939 0.02503397 0.0010378470 0.05189331 0.0008985585
## 174 174 0.03224072 0.2074767 0.02483483 0.0008580919 0.04042029 0.0008979693
## 175 175 0.03252615 0.1945060 0.02508811 0.0012921412 0.05302872 0.0010314613
## 176 176 0.03250902 0.1936926 0.02504772 0.0008261986 0.05178630 0.0008263191
## 177 177 0.03203113 0.2182648 0.02464919 0.0004041137 0.01268905 0.0004860247
## 178 178 0.03231748 0.2047896 0.02485618 0.0010426543 0.04154536 0.0008258071
## 179 179 0.03229366 0.2061254 0.02486729 0.0011045980 0.04476148 0.0009632621
## 180 180 0.03203363 0.2181793 0.02464923 0.0004097293 0.01328810 0.0004910026
## 181 181 0.03203262 0.2182431 0.02464978 0.0004123029 0.01338778 0.0004918606
## 182 182 0.03216675 0.2101145 0.02478349 0.0006171742 0.03280067 0.0006832831
## 183 183 0.03243627 0.1978111 0.02503309 0.0010142128 0.04815672 0.0009713874
## 184 184 0.03241152 0.1981697 0.02497885 0.0008374812 0.04870270 0.0009591808
## 185 185 0.03217392 0.2098022 0.02479264 0.0006269312 0.03354653 0.0006883656
## 186 186 0.03253593 0.1942254 0.02509224 0.0012952454 0.05330553 0.0010316017
## 187 187 0.03203769 0.2180592 0.02465822 0.0004111140 0.01361245 0.0004913840
## 188 188 0.03233420 0.2041285 0.02487380 0.0010551202 0.04212003 0.0008287384
## 189 189 0.03203884 0.2180051 0.02465887 0.0004120914 0.01364064 0.0004882045
## 190 190 0.03247820 0.1956016 0.02501403 0.0011236554 0.04985944 0.0009335298
## 191 191 0.03254765 0.1932959 0.02506211 0.0012613383 0.05460769 0.0010925566
## 192 192 0.03251778 0.1942138 0.02509406 0.0009283089 0.04183204 0.0007634255
## 193 193 0.03204134 0.2179064 0.02466164 0.0004108572 0.01371604 0.0004920245
## 194 194 0.03203941 0.2179878 0.02466149 0.0004081276 0.01359105 0.0004903248
## 195 195 0.03224326 0.2078963 0.02486026 0.0007672529 0.03427124 0.0006916765
## 196 196 0.03224510 0.2074561 0.02484573 0.0008614054 0.04076871 0.0009029696
## 197 197 0.03251820 0.1949495 0.02507644 0.0013228786 0.05750693 0.0012049789
## 198 198 0.03204257 0.2178645 0.02466503 0.0004032203 0.01357329 0.0004858109
## 199 199 0.03204302 0.2178561 0.02466503 0.0004064311 0.01370731 0.0004870789
## 200 200 0.03224512 0.2074482 0.02484424 0.0008477692 0.04025308 0.0008852864
## 201 201 0.03204245 0.2178859 0.02466378 0.0004091036 0.01370401 0.0004897748
## 202 202 0.03204223 0.2179068 0.02466325 0.0004123204 0.01390280 0.0004917492
## 203 203 0.03204446 0.2178224 0.02466613 0.0004143265 0.01399761 0.0004947087
## 204 204 0.03220160 0.2096140 0.02480222 0.0007429068 0.03304270 0.0008365693
## 205 205 0.03241262 0.1979600 0.02500455 0.0009917937 0.05112872 0.0010214392
## 206 206 0.03204384 0.2178641 0.02466501 0.0004165036 0.01414663 0.0004947564
## 207 207 0.03204526 0.2177972 0.02466537 0.0004137923 0.01396160 0.0004902230
## 208 208 0.03204458 0.2178226 0.02466569 0.0004122505 0.01394233 0.0004894946
## 209 209 0.03204469 0.2178272 0.02466659 0.0004124110 0.01399642 0.0004896656
## 210 210 0.03204371 0.2178649 0.02466620 0.0004132771 0.01403759 0.0004912000
## 211 211 0.03252178 0.1940144 0.02506667 0.0010566934 0.05358167 0.0009937913
## 212 212 0.03224910 0.2073374 0.02484310 0.0008687706 0.04109896 0.0008880180
## 213 213 0.03204114 0.2179733 0.02466458 0.0004153016 0.01412552 0.0004920673
## 214 214 0.03242011 0.1989513 0.02500282 0.0009891965 0.04522300 0.0009555375
## 215 215 0.03220694 0.2085445 0.02482849 0.0006954537 0.03684053 0.0007516839
## 216 216 0.03204231 0.2179339 0.02466461 0.0004171828 0.01429500 0.0004937500
## 217 217 0.03204409 0.2178578 0.02466632 0.0004166875 0.01427576 0.0004938043
## 218 218 0.03204341 0.2178861 0.02466584 0.0004180990 0.01431412 0.0004944950
## 219 219 0.03255287 0.1930910 0.02511229 0.0012070524 0.05869586 0.0011259776
## 220 220 0.03224923 0.2073620 0.02484100 0.0008682909 0.04100271 0.0008766245
## 221 221 0.03204496 0.2178199 0.02466793 0.0004171791 0.01430380 0.0004933065
## 222 222 0.03204632 0.2177630 0.02466953 0.0004176042 0.01433160 0.0004935587
## 223 223 0.03268528 0.1856915 0.02523764 0.0010338821 0.05053945 0.0009508420
## 224 224 0.03204693 0.2177398 0.02466967 0.0004169454 0.01432580 0.0004942027
## 225 225 0.03221830 0.2080795 0.02484549 0.0007122181 0.03764178 0.0007799152
## 226 226 0.03204662 0.2177579 0.02466911 0.0004179622 0.01437743 0.0004941819
## 227 227 0.03246991 0.1958102 0.02508345 0.0010146217 0.04858700 0.0009079991
## 228 228 0.03204672 0.2177517 0.02466905 0.0004173251 0.01433745 0.0004927076
## 229 229 0.03235679 0.2035329 0.02490493 0.0010924840 0.04306969 0.0008843012
## 230 230 0.03204622 0.2177733 0.02466865 0.0004174209 0.01434463 0.0004934204
## 231 231 0.03256779 0.1918078 0.02511183 0.0009285572 0.05531399 0.0009053586
## 232 232 0.03249324 0.1952840 0.02507393 0.0009057634 0.05100131 0.0008684669
## 233 233 0.03260154 0.1914868 0.02513696 0.0012758064 0.05135465 0.0009763684
## 234 234 0.03246891 0.1953513 0.02506464 0.0008694993 0.05369181 0.0009584522
## 235 235 0.03258063 0.1918501 0.02513908 0.0010876377 0.05127604 0.0008708461
## 236 236 0.03272542 0.1852331 0.02527224 0.0014131674 0.06035398 0.0012766004
## 237 237 0.03204697 0.2177468 0.02466842 0.0004172423 0.01438079 0.0004934998
## 238 238 0.03252912 0.1946728 0.02508867 0.0013450286 0.05811040 0.0012272731
## 239 239 0.03333210 0.1544188 0.02577821 0.0011369702 0.06168369 0.0010416997
## 240 240 0.03204715 0.2177384 0.02466849 0.0004172579 0.01437592 0.0004936210
## [1] "Best Model"
## nvmax
## 13 13
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## [1] "Coefficients of final model:"
## Estimate 2.5 % 97.5 %
## (Intercept) 1.997028e+00 1.990567e+00 2.003489e+00
## x4 -5.353737e-05 -7.087999e-05 -3.619475e-05
## x7 1.124268e-02 1.001665e-02 1.246870e-02
## x9 3.331630e-03 2.697855e-03 3.965405e-03
## x10 9.724997e-04 3.819591e-04 1.563040e-03
## x16 8.623206e-04 4.509088e-04 1.273732e-03
## x17 1.580806e-03 9.585313e-04 2.203081e-03
## stat14 -8.708250e-04 -1.340835e-03 -4.008148e-04
## stat98 3.526849e-03 3.057876e-03 3.995822e-03
## stat100 8.080842e-04 3.311141e-04 1.285054e-03
## stat106 -8.188471e-04 -1.292306e-03 -3.453887e-04
## stat110 -3.383503e-03 -3.858173e-03 -2.908833e-03
## stat149 -6.693717e-04 -1.148576e-03 -1.901673e-04
## x18.sqrt 2.706707e-02 2.525932e-02 2.887482e-02
# Evaluate the stepwise-selected (leapSeq) model on the held-out test set,
# reporting the test MSE and drawing prediction-limit bands.
# isTRUE() is safer than `== TRUE`: it handles NA / non-logical values
# without erroring or silently skipping.
# NOTE(review): `transformation = t` presumably refers to a transformation
# object defined earlier in the file (e.g. the log/back-transform), not
# base::t — confirm against the chunk where `t` is assigned.
if (isTRUE(algo.stepwise.caret)) {
  test.model(model.stepwise, data.test
             ,method = 'leapSeq', subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,id = id
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 2.039 2.085 2.098 2.098 2.111 2.149
## [1] "leapSeq Test MSE: 0.00105329564605849"
# Train a LASSO model via caret/glmnet: alpha fixed at 1 (pure L1 penalty),
# lambda tuned over a 100-point log-spaced grid from 1e-4 to 1.
# Uses `<-` for assignment (tidyverse style) and isTRUE() for the flag check.
if (isTRUE(algo.LASSO.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  tune.grid <- expand.grid(alpha = 1,
                           lambda = 10^seq(from = -4, to = 0, length = 100))
  returned <- train.caret.glmselect(formula = formula
                                    ,data = data.train
                                    ,method = "glmnet"
                                    ,subopt = 'LASSO'
                                    ,tune.grid = tune.grid
                                    ,feature.names = feature.names)
  model.LASSO.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info = trainInfo, : There were missing values in resampled
## performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.000586 on full training set
## glmnet
##
## 5584 samples
## 240 predictor
##
## No pre-processing
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 5026, 5026, 5026, 5025, 5025, 5026, ...
## Resampling results across tuning parameters:
##
## lambda RMSE Rsquared MAE
## 0.0001000000 0.03182813 0.2253562 0.02449186
## 0.0001097499 0.03181112 0.2259910 0.02447799
## 0.0001204504 0.03179307 0.2266740 0.02446355
## 0.0001321941 0.03177451 0.2273817 0.02444897
## 0.0001450829 0.03175577 0.2281017 0.02443435
## 0.0001592283 0.03173614 0.2288707 0.02441906
## 0.0001747528 0.03171548 0.2296984 0.02440305
## 0.0001917910 0.03169422 0.2305688 0.02438644
## 0.0002104904 0.03167307 0.2314521 0.02436961
## 0.0002310130 0.03165183 0.2323596 0.02435252
## 0.0002535364 0.03163076 0.2332832 0.02433557
## 0.0002782559 0.03161027 0.2342092 0.02431981
## 0.0003053856 0.03159104 0.2351106 0.02430576
## 0.0003351603 0.03157306 0.2359923 0.02429385
## 0.0003678380 0.03155629 0.2368604 0.02428260
## 0.0004037017 0.03154248 0.2376298 0.02427339
## 0.0004430621 0.03153181 0.2382896 0.02426804
## 0.0004862602 0.03152371 0.2388754 0.02426503
## 0.0005336699 0.03151743 0.2394271 0.02426238
## 0.0005857021 0.03151460 0.2398563 0.02426227
## 0.0006428073 0.03151566 0.2401489 0.02426656
## 0.0007054802 0.03152134 0.2402807 0.02427523
## 0.0007742637 0.03153212 0.2402287 0.02428953
## 0.0008497534 0.03154937 0.2399158 0.02431137
## 0.0009326033 0.03157481 0.2392534 0.02434212
## 0.0010235310 0.03160756 0.2383040 0.02438004
## 0.0011233240 0.03164653 0.2371204 0.02442317
## 0.0012328467 0.03169135 0.2357430 0.02447029
## 0.0013530478 0.03174183 0.2341917 0.02452062
## 0.0014849683 0.03179419 0.2327135 0.02457176
## 0.0016297508 0.03184933 0.2313027 0.02462588
## 0.0017886495 0.03191046 0.2298038 0.02468683
## 0.0019630407 0.03198163 0.2280068 0.02475751
## 0.0021544347 0.03206331 0.2259215 0.02483808
## 0.0023644894 0.03215302 0.2237447 0.02492549
## 0.0025950242 0.03224989 0.2216157 0.02501889
## 0.0028480359 0.03235843 0.2193553 0.02511963
## 0.0031257158 0.03248718 0.2164150 0.02523546
## 0.0034304693 0.03264161 0.2123620 0.02536902
## 0.0037649358 0.03282639 0.2067107 0.02552264
## 0.0041320124 0.03303848 0.1994647 0.02569510
## 0.0045348785 0.03325951 0.1920874 0.02587444
## 0.0049770236 0.03351078 0.1825193 0.02607501
## 0.0054622772 0.03380544 0.1685396 0.02630636
## 0.0059948425 0.03411327 0.1520260 0.02654082
## 0.0065793322 0.03438717 0.1381643 0.02673993
## 0.0072208090 0.03463785 0.1263829 0.02691716
## 0.0079248290 0.03486555 0.1169824 0.02707925
## 0.0086974900 0.03505174 0.1165510 0.02720910
## 0.0095454846 0.03527187 0.1165510 0.02736404
## 0.0104761575 0.03553518 0.1165510 0.02754949
## 0.0114975700 0.03584973 0.1165510 0.02777352
## 0.0126185688 0.03609949 NaN 0.02795295
## 0.0138488637 0.03609949 NaN 0.02795295
## 0.0151991108 0.03609949 NaN 0.02795295
## 0.0166810054 0.03609949 NaN 0.02795295
## 0.0183073828 0.03609949 NaN 0.02795295
## 0.0200923300 0.03609949 NaN 0.02795295
## 0.0220513074 0.03609949 NaN 0.02795295
## 0.0242012826 0.03609949 NaN 0.02795295
## 0.0265608778 0.03609949 NaN 0.02795295
## 0.0291505306 0.03609949 NaN 0.02795295
## 0.0319926714 0.03609949 NaN 0.02795295
## 0.0351119173 0.03609949 NaN 0.02795295
## 0.0385352859 0.03609949 NaN 0.02795295
## 0.0422924287 0.03609949 NaN 0.02795295
## 0.0464158883 0.03609949 NaN 0.02795295
## 0.0509413801 0.03609949 NaN 0.02795295
## 0.0559081018 0.03609949 NaN 0.02795295
## 0.0613590727 0.03609949 NaN 0.02795295
## 0.0673415066 0.03609949 NaN 0.02795295
## 0.0739072203 0.03609949 NaN 0.02795295
## 0.0811130831 0.03609949 NaN 0.02795295
## 0.0890215085 0.03609949 NaN 0.02795295
## 0.0977009957 0.03609949 NaN 0.02795295
## 0.1072267222 0.03609949 NaN 0.02795295
## 0.1176811952 0.03609949 NaN 0.02795295
## 0.1291549665 0.03609949 NaN 0.02795295
## 0.1417474163 0.03609949 NaN 0.02795295
## 0.1555676144 0.03609949 NaN 0.02795295
## 0.1707352647 0.03609949 NaN 0.02795295
## 0.1873817423 0.03609949 NaN 0.02795295
## 0.2056512308 0.03609949 NaN 0.02795295
## 0.2257019720 0.03609949 NaN 0.02795295
## 0.2477076356 0.03609949 NaN 0.02795295
## 0.2718588243 0.03609949 NaN 0.02795295
## 0.2983647240 0.03609949 NaN 0.02795295
## 0.3274549163 0.03609949 NaN 0.02795295
## 0.3593813664 0.03609949 NaN 0.02795295
## 0.3944206059 0.03609949 NaN 0.02795295
## 0.4328761281 0.03609949 NaN 0.02795295
## 0.4750810162 0.03609949 NaN 0.02795295
## 0.5214008288 0.03609949 NaN 0.02795295
## 0.5722367659 0.03609949 NaN 0.02795295
## 0.6280291442 0.03609949 NaN 0.02795295
## 0.6892612104 0.03609949 NaN 0.02795295
## 0.7564633276 0.03609949 NaN 0.02795295
## 0.8302175681 0.03609949 NaN 0.02795295
## 0.9111627561 0.03609949 NaN 0.02795295
## 1.0000000000 0.03609949 NaN 0.02795295
##
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.0005857021.
## alpha lambda
## 20 1 0.0005857021
## alpha lambda RMSE Rsquared MAE RMSESD RsquaredSD MAESD
## 1 1 0.0001000000 0.03182813 0.2253562 0.02449186 0.0004112926 0.01290638 0.0004801121
## 2 1 0.0001097499 0.03181112 0.2259910 0.02447799 0.0004128980 0.01288284 0.0004802153
## 3 1 0.0001204504 0.03179307 0.2266740 0.02446355 0.0004148860 0.01287114 0.0004802119
## 4 1 0.0001321941 0.03177451 0.2273817 0.02444897 0.0004170847 0.01285876 0.0004799991
## 5 1 0.0001450829 0.03175577 0.2281017 0.02443435 0.0004193830 0.01284186 0.0004793726
## 6 1 0.0001592283 0.03173614 0.2288707 0.02441906 0.0004221900 0.01285915 0.0004786759
## 7 1 0.0001747528 0.03171548 0.2296984 0.02440305 0.0004254847 0.01292004 0.0004779857
## 8 1 0.0001917910 0.03169422 0.2305688 0.02438644 0.0004291780 0.01302572 0.0004779026
## 9 1 0.0002104904 0.03167307 0.2314521 0.02436961 0.0004336163 0.01317191 0.0004778846
## 10 1 0.0002310130 0.03165183 0.2323596 0.02435252 0.0004387653 0.01331730 0.0004779045
## 11 1 0.0002535364 0.03163076 0.2332832 0.02433557 0.0004451254 0.01348939 0.0004780568
## 12 1 0.0002782559 0.03161027 0.2342092 0.02431981 0.0004525760 0.01369484 0.0004785112
## 13 1 0.0003053856 0.03159104 0.2351106 0.02430576 0.0004599844 0.01391456 0.0004784586
## 14 1 0.0003351603 0.03157306 0.2359923 0.02429385 0.0004673756 0.01416774 0.0004792883
## 15 1 0.0003678380 0.03155629 0.2368604 0.02428260 0.0004763853 0.01452065 0.0004815531
## 16 1 0.0004037017 0.03154248 0.2376298 0.02427339 0.0004858824 0.01497961 0.0004850470
## 17 1 0.0004430621 0.03153181 0.2382896 0.02426804 0.0004942196 0.01547203 0.0004881584
## 18 1 0.0004862602 0.03152371 0.2388754 0.02426503 0.0005014514 0.01595827 0.0004904953
## 19 1 0.0005336699 0.03151743 0.2394271 0.02426238 0.0005088959 0.01647134 0.0004922619
## 20 1 0.0005857021 0.03151460 0.2398563 0.02426227 0.0005174512 0.01704780 0.0004942982
## 21 1 0.0006428073 0.03151566 0.2401489 0.02426656 0.0005259063 0.01763846 0.0004998141
## 22 1 0.0007054802 0.03152134 0.2402807 0.02427523 0.0005321161 0.01820573 0.0005058613
## 23 1 0.0007742637 0.03153212 0.2402287 0.02428953 0.0005363799 0.01878708 0.0005107317
## 24 1 0.0008497534 0.03154937 0.2399158 0.02431137 0.0005389418 0.01935136 0.0005142049
## 25 1 0.0009326033 0.03157481 0.2392534 0.02434212 0.0005386738 0.01980910 0.0005164134
## 26 1 0.0010235310 0.03160756 0.2383040 0.02438004 0.0005359278 0.02014906 0.0005171759
## 27 1 0.0011233240 0.03164653 0.2371204 0.02442317 0.0005308051 0.02034275 0.0005151084
## 28 1 0.0012328467 0.03169135 0.2357430 0.02447029 0.0005252130 0.02046593 0.0005112222
## 29 1 0.0013530478 0.03174183 0.2341917 0.02452062 0.0005202007 0.02049331 0.0005085230
## 30 1 0.0014849683 0.03179419 0.2327135 0.02457176 0.0005159148 0.02055282 0.0005055733
## 31 1 0.0016297508 0.03184933 0.2313027 0.02462588 0.0005137985 0.02081815 0.0005037508
## 32 1 0.0017886495 0.03191046 0.2298038 0.02468683 0.0005130974 0.02121345 0.0005045692
## 33 1 0.0019630407 0.03198163 0.2280068 0.02475751 0.0005119249 0.02163753 0.0005055771
## 34 1 0.0021544347 0.03206331 0.2259215 0.02483808 0.0005103678 0.02199876 0.0005046751
## 35 1 0.0023644894 0.03215302 0.2237447 0.02492549 0.0005079837 0.02227778 0.0005000449
## 36 1 0.0025950242 0.03224989 0.2216157 0.02501889 0.0005086940 0.02263539 0.0004965819
## 37 1 0.0028480359 0.03235843 0.2193553 0.02511963 0.0005104200 0.02310894 0.0004920715
## 38 1 0.0031257158 0.03248718 0.2164150 0.02523546 0.0005119287 0.02369695 0.0004877934
## 39 1 0.0034304693 0.03264161 0.2123620 0.02536902 0.0005137865 0.02440865 0.0004843599
## 40 1 0.0037649358 0.03282639 0.2067107 0.02552264 0.0005158204 0.02526941 0.0004801542
## 41 1 0.0041320124 0.03303848 0.1994647 0.02569510 0.0005141185 0.02607481 0.0004747017
## 42 1 0.0045348785 0.03325951 0.1920874 0.02587444 0.0005010392 0.02597629 0.0004658006
## 43 1 0.0049770236 0.03351078 0.1825193 0.02607501 0.0004905668 0.02617754 0.0004566565
## 44 1 0.0054622772 0.03380544 0.1685396 0.02630636 0.0004812364 0.02619900 0.0004439048
## 45 1 0.0059948425 0.03411327 0.1520260 0.02654082 0.0004697903 0.02559189 0.0004300913
## 46 1 0.0065793322 0.03438717 0.1381643 0.02673993 0.0004554340 0.02520530 0.0004116433
## 47 1 0.0072208090 0.03463785 0.1263829 0.02691716 0.0004407652 0.02483262 0.0003981921
## 48 1 0.0079248290 0.03486555 0.1169824 0.02707925 0.0004282534 0.02385177 0.0003879976
## 49 1 0.0086974900 0.03505174 0.1165510 0.02720910 0.0004168906 0.02327287 0.0003793648
## 50 1 0.0095454846 0.03527187 0.1165510 0.02736404 0.0004058780 0.02327287 0.0003692391
## 51 1 0.0104761575 0.03553518 0.1165510 0.02754949 0.0003960531 0.02327287 0.0003613042
## 52 1 0.0114975700 0.03584973 0.1165510 0.02777352 0.0003882517 0.02327287 0.0003566308
## 53 1 0.0126185688 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 54 1 0.0138488637 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 55 1 0.0151991108 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 56 1 0.0166810054 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 57 1 0.0183073828 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 58 1 0.0200923300 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 59 1 0.0220513074 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 60 1 0.0242012826 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 61 1 0.0265608778 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 62 1 0.0291505306 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 63 1 0.0319926714 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 64 1 0.0351119173 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 65 1 0.0385352859 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 66 1 0.0422924287 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 67 1 0.0464158883 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 68 1 0.0509413801 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 69 1 0.0559081018 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 70 1 0.0613590727 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 71 1 0.0673415066 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 72 1 0.0739072203 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 73 1 0.0811130831 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 74 1 0.0890215085 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 75 1 0.0977009957 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 76 1 0.1072267222 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 77 1 0.1176811952 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 78 1 0.1291549665 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 79 1 0.1417474163 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 80 1 0.1555676144 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 81 1 0.1707352647 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 82 1 0.1873817423 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 83 1 0.2056512308 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 84 1 0.2257019720 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 85 1 0.2477076356 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 86 1 0.2718588243 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 87 1 0.2983647240 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 88 1 0.3274549163 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 89 1 0.3593813664 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 90 1 0.3944206059 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 91 1 0.4328761281 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 92 1 0.4750810162 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 93 1 0.5214008288 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 94 1 0.5722367659 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 95 1 0.6280291442 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 96 1 0.6892612104 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 97 1 0.7564633276 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 98 1 0.8302175681 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 99 1 0.9111627561 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## 100 1 1.0000000000 0.03609949 NaN 0.02795295 0.0003820068 NA 0.0003529831
## Warning: Removed 48 rows containing missing values (geom_path).
## Warning: Removed 48 rows containing missing values (geom_point).
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## [1] "Coefficients"
## model.coef
## (Intercept) 1.996694e+00
## x4 -4.137124e-05
## x7 1.034449e-02
## x8 9.233813e-05
## x9 2.870577e-03
## x10 5.741971e-04
## x11 8.761461e+04
## x16 5.779076e-04
## x17 1.146220e-03
## x21 2.317611e-05
## x22 -1.044726e-04
## stat8 9.793975e-05
## stat11 -4.137761e-05
## stat13 -2.486323e-05
## stat14 -5.273525e-04
## stat20 -1.427208e-04
## stat22 -1.079581e-04
## stat23 9.747235e-05
## stat25 -2.270957e-04
## stat26 -2.619115e-05
## stat33 -4.712003e-06
## stat35 -9.721027e-06
## stat38 2.454631e-05
## stat41 -2.393317e-04
## stat50 9.577163e-06
## stat60 2.466481e-04
## stat64 -1.366805e-05
## stat70 5.113107e-06
## stat72 7.652685e-06
## stat78 -9.314531e-06
## stat84 -2.126197e-05
## stat91 -8.573195e-05
## stat92 -7.344461e-05
## stat98 3.220899e-03
## stat100 4.566188e-04
## stat101 -5.038632e-05
## stat104 -7.992222e-05
## stat106 -4.824548e-04
## stat107 -1.236599e-05
## stat110 -3.045238e-03
## stat114 1.763431e-04
## stat121 -3.747748e-06
## stat130 6.530579e-05
## stat131 1.333318e-04
## stat141 1.671148e-05
## stat144 1.089764e-04
## stat146 -4.134722e-06
## stat148 -7.558066e-05
## stat149 -3.363027e-04
## stat156 2.290536e-06
## stat172 3.790967e-05
## stat198 -1.052017e-04
## stat202 -9.461851e-06
## stat204 -1.098365e-04
## stat205 -3.245734e-06
## stat207 2.565348e-05
## stat217 2.841715e-04
## x18.sqrt 2.575491e-02
# Evaluate the trained caret glmnet (LASSO) model on the hold-out test set,
# reporting the test MSE and prediction summary on the original scale
# (transformation `t` un-does the log transform applied to the label).
# isTRUE() instead of `== TRUE`: `flag == TRUE` returns NA when the flag is
# NA (crashing the `if`), whereas isTRUE() is simply FALSE in that case.
if (isTRUE(algo.LASSO.caret)) {
  test.model(model.LASSO.caret, data.test
             ,method = 'glmnet', subopt = "LASSO"
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 2.047 2.086 2.098 2.097 2.110 2.142
## [1] "glmnet LASSO Test MSE: 0.00104636710590436"
# Fit a Least Angle Regression (LARS) model via caret. The seed is fixed so
# the 10-fold CV resampling — and therefore the selected `fraction` tuning
# value — is reproducible across runs.
# isTRUE() instead of `== TRUE`: robust to an NA or missing flag value.
if (isTRUE(algo.LARS.caret)) {
  set.seed(1)
  # NOTE(review): subopt is the *string* 'NULL' here, while the matching
  # test.model() call for LARS passes the literal NULL — confirm that
  # train.caret.glmselect() expects the string sentinel.
  returned = train.caret.glmselect(formula = formula
                                   ,data = data.train
                                   ,method = "lars"
                                   ,subopt = 'NULL'
                                   ,feature.names = feature.names)
  # Keep only the fitted caret model object for downstream evaluation.
  model.LARS.caret = returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info = trainInfo, : There were missing values in resampled
## performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.364 on full training set
## Least Angle Regression
##
## 5584 samples
## 240 predictor
##
## Pre-processing: centered (240), scaled (240)
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 5026, 5026, 5026, 5025, 5025, 5026, ...
## Resampling results across tuning parameters:
##
## fraction RMSE Rsquared MAE
## 0.00000000 0.03609949 NaN 0.02795295
## 0.01010101 0.03566557 0.1165510 0.02764327
## 0.02020202 0.03527735 0.1165510 0.02736835
## 0.03030303 0.03493635 0.1165510 0.02712923
## 0.04040404 0.03464739 0.1254073 0.02692472
## 0.05050505 0.03438032 0.1381797 0.02673439
## 0.06060606 0.03414114 0.1501521 0.02656010
## 0.07070707 0.03391455 0.1623008 0.02638900
## 0.08080808 0.03369818 0.1738672 0.02622237
## 0.09090909 0.03349326 0.1832073 0.02606104
## 0.10101010 0.03330171 0.1905653 0.02590895
## 0.11111111 0.03312627 0.1963870 0.02576632
## 0.12121212 0.03296232 0.2018369 0.02563184
## 0.13131313 0.03280479 0.2073230 0.02550340
## 0.14141414 0.03265749 0.2118278 0.02538105
## 0.15151515 0.03252057 0.2155091 0.02526351
## 0.16161616 0.03239417 0.2185031 0.02515067
## 0.17171717 0.03227968 0.2209178 0.02504550
## 0.18181818 0.03217643 0.2231069 0.02494708
## 0.19191919 0.03208178 0.2254545 0.02485508
## 0.20202020 0.03199751 0.2275845 0.02477338
## 0.21212121 0.03192102 0.2295372 0.02469809
## 0.22222222 0.03185272 0.2312496 0.02463092
## 0.23232323 0.03179101 0.2328921 0.02457070
## 0.24242424 0.03173894 0.2343804 0.02451979
## 0.25252525 0.03169174 0.2358430 0.02447261
## 0.26262626 0.03165027 0.2371267 0.02442946
## 0.27272727 0.03161477 0.2382017 0.02439063
## 0.28282828 0.03158465 0.2390975 0.02435589
## 0.29292929 0.03156131 0.2397304 0.02432734
## 0.30303030 0.03154426 0.2401402 0.02430589
## 0.31313131 0.03153357 0.2403025 0.02429209
## 0.32323232 0.03152648 0.2403301 0.02428206
## 0.33333333 0.03152124 0.2403120 0.02427512
## 0.34343434 0.03151794 0.2402388 0.02427064
## 0.35353535 0.03151584 0.2401449 0.02426747
## 0.36363636 0.03151477 0.2400246 0.02426494
## 0.37373737 0.03151492 0.2398695 0.02426338
## 0.38383838 0.03151559 0.2397014 0.02426297
## 0.39393939 0.03151662 0.2395289 0.02426260
## 0.40404040 0.03151846 0.2393311 0.02426280
## 0.41414141 0.03152107 0.2391051 0.02426372
## 0.42424242 0.03152397 0.2388753 0.02426502
## 0.43434343 0.03152672 0.2386625 0.02426609
## 0.44444444 0.03152932 0.2384663 0.02426674
## 0.45454545 0.03153226 0.2382612 0.02426769
## 0.46464646 0.03153567 0.2380423 0.02426885
## 0.47474747 0.03153950 0.2378089 0.02427091
## 0.48484848 0.03154365 0.2375640 0.02427335
## 0.49494949 0.03154826 0.2373028 0.02427660
## 0.50505051 0.03155319 0.2370297 0.02428035
## 0.51515152 0.03155878 0.2367309 0.02428431
## 0.52525253 0.03156465 0.2364245 0.02428820
## 0.53535354 0.03157072 0.2361133 0.02429204
## 0.54545455 0.03157691 0.2358014 0.02429580
## 0.55555556 0.03158331 0.2354857 0.02430017
## 0.56565657 0.03158979 0.2351707 0.02430474
## 0.57575758 0.03159641 0.2348546 0.02430948
## 0.58585859 0.03160321 0.2345359 0.02431457
## 0.59595960 0.03161030 0.2342077 0.02431988
## 0.60606061 0.03161771 0.2338699 0.02432557
## 0.61616162 0.03162541 0.2335226 0.02433144
## 0.62626263 0.03163339 0.2331664 0.02433765
## 0.63636364 0.03164139 0.2328144 0.02434405
## 0.64646465 0.03164954 0.2324592 0.02435060
## 0.65656566 0.03165788 0.2320997 0.02435756
## 0.66666667 0.03166654 0.2317286 0.02436460
## 0.67676768 0.03167517 0.2313637 0.02437146
## 0.68686869 0.03168407 0.2309900 0.02437848
## 0.69696970 0.03169335 0.2306032 0.02438591
## 0.70707071 0.03170273 0.2302172 0.02439330
## 0.71717172 0.03171236 0.2298240 0.02440090
## 0.72727273 0.03172218 0.2294265 0.02440860
## 0.73737374 0.03173201 0.2290335 0.02441627
## 0.74747475 0.03174192 0.2286420 0.02442403
## 0.75757576 0.03175181 0.2282564 0.02443177
## 0.76767677 0.03176183 0.2278701 0.02443962
## 0.77777778 0.03177212 0.2274756 0.02444771
## 0.78787879 0.03178292 0.2270621 0.02445607
## 0.79797980 0.03179420 0.2266313 0.02446498
## 0.80808081 0.03180565 0.2261971 0.02447410
## 0.81818182 0.03181727 0.2257601 0.02448342
## 0.82828283 0.03182878 0.2253327 0.02449279
## 0.83838384 0.03184023 0.2249118 0.02450211
## 0.84848485 0.03185171 0.2244942 0.02451172
## 0.85858586 0.03186343 0.2240716 0.02452158
## 0.86868687 0.03187526 0.2236483 0.02453156
## 0.87878788 0.03188728 0.2232210 0.02454163
## 0.88888889 0.03189950 0.2227898 0.02455175
## 0.89898990 0.03191190 0.2223547 0.02456194
## 0.90909091 0.03192432 0.2219221 0.02457206
## 0.91919192 0.03193696 0.2214846 0.02458218
## 0.92929293 0.03194995 0.2210355 0.02459244
## 0.93939394 0.03196324 0.2205769 0.02460289
## 0.94949495 0.03197674 0.2201144 0.02461360
## 0.95959596 0.03199047 0.2196461 0.02462446
## 0.96969697 0.03200441 0.2191727 0.02463536
## 0.97979798 0.03201847 0.2186988 0.02464633
## 0.98989899 0.03203271 0.2182209 0.02465734
## 1.00000000 0.03204715 0.2177384 0.02466849
##
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.3636364.
## fraction
## 37 0.3636364
## Warning: Removed 1 rows containing missing values (geom_point).
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## [1] "Coefficients"
## x4 x7 x8 x9 x10 x11 x16 x17
## -1.946025e-03 6.930017e-03 2.469699e-04 3.707120e-03 7.812485e-04 4.904787e-04 1.138580e-03 1.496156e-03
## x21 x22 stat8 stat11 stat13 stat14 stat20 stat22
## 2.131980e-04 -1.102560e-04 1.481560e-04 -4.805210e-05 -2.570686e-05 -9.007458e-04 -2.292286e-04 -1.674102e-04
## stat23 stat25 stat26 stat38 stat41 stat60 stat84 stat91
## 1.524426e-04 -3.746489e-04 -2.690641e-05 2.165161e-05 -3.988136e-04 4.056540e-04 -1.386199e-05 -1.289322e-04
## stat92 stat98 stat100 stat101 stat104 stat106 stat110 stat114
## -1.058285e-04 5.638536e-03 7.692196e-04 -6.517484e-05 -1.188403e-04 -8.223335e-04 -5.260326e-03 2.869453e-04
## stat130 stat131 stat141 stat144 stat148 stat149 stat172 stat198
## 9.390747e-05 2.120464e-04 1.183249e-05 1.703018e-04 -1.134706e-04 -5.585503e-04 4.455220e-05 -1.606406e-04
## stat204 stat207 stat217 x18.sqrt
## -1.697084e-04 2.404899e-05 4.747888e-04 1.171761e-02
# Evaluate the trained caret LARS model on the hold-out test set, reporting
# the test MSE and prediction summary on the original scale (transformation
# `t` un-does the log transform applied to the label).
# isTRUE() instead of `== TRUE`: `flag == TRUE` returns NA when the flag is
# NA (crashing the `if`), whereas isTRUE() is simply FALSE in that case.
if (isTRUE(algo.LARS.caret)) {
  test.model(model.LARS.caret, data.test
             ,method = 'lars', subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 2.047 2.086 2.098 2.097 2.110 2.142
## [1] "lars Test MSE: 0.00104609142792686"
sessionInfo()
## R version 3.5.2 (2018-12-20)
## Platform: x86_64-w64-mingw32/x64 (64-bit)
## Running under: Windows 10 x64 (build 17763)
##
## Matrix products: default
##
## locale:
## [1] LC_COLLATE=English_United States.1252 LC_CTYPE=English_United States.1252 LC_MONETARY=English_United States.1252
## [4] LC_NUMERIC=C LC_TIME=English_United States.1252
##
## attached base packages:
## [1] parallel stats graphics grDevices utils datasets methods base
##
## other attached packages:
## [1] knitr_1.21 htmltools_0.3.6 reshape2_1.4.3 lars_1.2
## [5] doParallel_1.0.14 iterators_1.0.10 caret_6.0-81 leaps_3.0
## [9] ggforce_0.1.3 rlist_0.4.6.1 car_3.0-2 carData_3.0-2
## [13] bestNormalize_1.3.0 scales_1.0.0 onewaytests_2.0 caTools_1.17.1.1
## [17] mosaic_1.5.0 mosaicData_0.17.0 ggformula_0.9.1 ggstance_0.3.1
## [21] lattice_0.20-38 DT_0.5 ggiraphExtra_0.2.9 ggiraph_0.6.0
## [25] investr_1.4.0 glmnet_2.0-16 foreach_1.4.4 Matrix_1.2-15
## [29] MASS_7.3-51.1 PerformanceAnalytics_1.5.2 xts_0.11-2 zoo_1.8-4
## [33] forcats_0.3.0 stringr_1.4.0 dplyr_0.8.0 purrr_0.3.0
## [37] readr_1.3.1 tidyr_0.8.2 tibble_2.0.1 ggplot2_3.1.0
## [41] tidyverse_1.2.1 usdm_1.1-18 raster_2.8-19 sp_1.3-1
## [45] pacman_0.5.0
##
## loaded via a namespace (and not attached):
## [1] readxl_1.2.0 backports_1.1.3 plyr_1.8.4 lazyeval_0.2.1 splines_3.5.2 mycor_0.1.1
## [7] crosstalk_1.0.0 leaflet_2.0.2 digest_0.6.18 magrittr_1.5 mosaicCore_0.6.0 openxlsx_4.1.0
## [13] recipes_0.1.4 modelr_0.1.3 gower_0.1.2 colorspace_1.4-0 rvest_0.3.2 ggrepel_0.8.0
## [19] haven_2.0.0 xfun_0.4 crayon_1.3.4 jsonlite_1.6 survival_2.43-3 glue_1.3.0
## [25] registry_0.5 gtable_0.2.0 ppcor_1.1 ipred_0.9-8 sjmisc_2.7.7 abind_1.4-5
## [31] rngtools_1.3.1 bibtex_0.4.2 Rcpp_1.0.0 xtable_1.8-3 units_0.6-2 foreign_0.8-71
## [37] stats4_3.5.2 lava_1.6.5 prodlim_2018.04.18 prediction_0.3.6.2 htmlwidgets_1.3 httr_1.4.0
## [43] RColorBrewer_1.1-2 pkgconfig_2.0.2 farver_1.1.0 nnet_7.3-12 labeling_0.3 tidyselect_0.2.5
## [49] rlang_0.3.1 later_0.8.0 munsell_0.5.0 cellranger_1.1.0 tools_3.5.2 cli_1.0.1
## [55] generics_0.0.2 moments_0.14 sjlabelled_1.0.16 broom_0.5.1 evaluate_0.13 ggdendro_0.1-20
## [61] yaml_2.2.0 ModelMetrics_1.2.2 zip_1.0.0 nlme_3.1-137 doRNG_1.7.1 mime_0.6
## [67] xml2_1.2.0 compiler_3.5.2 rstudioapi_0.9.0 curl_3.3 tweenr_1.0.1 stringi_1.3.1
## [73] highr_0.7 gdtools_0.1.7 stringdist_0.9.5.1 pillar_1.3.1 data.table_1.12.0 bitops_1.0-6
## [79] httpuv_1.4.5.1 R6_2.4.0 promises_1.0.1 gridExtra_2.3 rio_0.5.16 codetools_0.2-15
## [85] assertthat_0.2.0 pkgmaker_0.27 withr_2.1.2 nortest_1.0-4 mgcv_1.8-26 hms_0.4.2
## [91] quadprog_1.5-5 grid_3.5.2 rpart_4.1-13 timeDate_3043.102 class_7.3-14 rmarkdown_1.11
## [97] snakecase_0.9.2 shiny_1.2.0 lubridate_1.7.4